From 63105b9c93aed8fa016abd889f7d76b18d633d42 Mon Sep 17 00:00:00 2001 From: "Documenter.jl" Date: Fri, 8 Mar 2024 06:58:50 +0000 Subject: [PATCH] build based on dab6152 --- stable | 2 +- v0.20 | 2 +- v0.20.3/.documenter-siteinfo.json | 1 + v0.20.3/about_mlj/index.html | 57 + .../acceleration_and_parallelism/index.html | 2 + .../adding_models_for_general_use/index.html | 2 + v0.20.3/api/index.html | 2 + v0.20.3/assets/documenter.js | 1050 +++++++ v0.20.3/assets/logo.svg | 1 + v0.20.3/assets/themes/documenter-dark.css | 7 + v0.20.3/assets/themes/documenter-light.css | 9 + v0.20.3/assets/themeswap.js | 84 + v0.20.3/assets/warner.js | 52 + v0.20.3/benchmarking/index.html | 2 + v0.20.3/common_mlj_workflows/index.html | 538 ++++ v0.20.3/composing_models/index.html | 2 + .../controlling_iterative_models/index.html | 166 + v0.20.3/correcting_class_imbalance/index.html | 23 + .../evaluating_model_performance/index.html | 153 + v0.20.3/frequently_asked_questions/index.html | 2 + v0.20.3/generating_synthetic_data.ipynb | 2716 +++++++++++++++++ v0.20.3/generating_synthetic_data/index.html | 21 + v0.20.3/getting_started/index.html | 232 ++ v0.20.3/glossary/index.html | 2 + v0.20.3/homogeneous_ensembles/index.html | 8 + v0.20.3/img/MLJLogo2.svg | 82 + v0.20.3/img/MLPackages.png | Bin 0 -> 60357 bytes v0.20.3/img/heatmap.png | Bin 0 -> 17853 bytes v0.20.3/img/latin_hypercube_tuning_plot.png | Bin 0 -> 87082 bytes v0.20.3/img/learning_curve42.png | Bin 0 -> 38412 bytes v0.20.3/img/learning_curve_n.png | Bin 0 -> 33942 bytes v0.20.3/img/learningcurves.png | Bin 0 -> 38242 bytes v0.20.3/img/output_11_0.svg | 3 + v0.20.3/img/output_4_0.svg | 3 + v0.20.3/img/output_5_0.svg | 3 + v0.20.3/img/output_8_0.svg | 3 + v0.20.3/img/random_search_tuning_plot.png | Bin 0 -> 44565 bytes v0.20.3/img/scitypes.png | Bin 0 -> 64286 bytes v0.20.3/img/scitypes_small.png | Bin 0 -> 13481 bytes v0.20.3/img/simple.drawio | 1 + v0.20.3/img/simple.png | Bin 0 -> 19737 bytes v0.20.3/img/simple.svg | 3 + v0.20.3/img/target_transformer.drawio | 1 + v0.20.3/img/target_transformer.png | Bin 0 -> 125524 bytes v0.20.3/img/target_transformer2.drawio | 1 + v0.20.3/img/target_transformer2.png | Bin 0 -> 125500 bytes v0.20.3/img/tuning_plot.png | Bin 0 -> 45949 bytes v0.20.3/img/two_model_stack.png | Bin 0 -> 127719 bytes v0.20.3/img/workflows_learning_curve.png | Bin 0 -> 18869 bytes v0.20.3/img/workflows_learning_curves.png | Bin 0 -> 32301 bytes v0.20.3/img/workflows_tuning_plot.png | Bin 0 -> 62214 bytes v0.20.3/img/wrapped_ridge.png | Bin 0 -> 148789 bytes v0.20.3/index.html | 26 + v0.20.3/internals/index.html | 66 + v0.20.3/learning_curves/index.html | 65 + v0.20.3/learning_mlj/index.html | 2 + v0.20.3/learning_networks/index.html | 417 +++ v0.20.3/linear_pipelines/index.html | 38 + v0.20.3/list_of_supported_models/index.html | 2 + v0.20.3/loading_model_code/index.html | 12 + v0.20.3/logging_workflows/index.html | 2 + v0.20.3/machines/index.html | 116 + v0.20.3/mlj_cheatsheet/index.html | 26 + v0.20.3/model_browser/index.html | 2 + v0.20.3/model_search/index.html | 129 + v0.20.3/model_stacking/index.html | 24 + .../index.html | 13 + .../index.html | 3 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 19 + .../index.html | 2 + .../index.html | 2 + v0.20.3/models/AutoEncoder_BetaML/index.html | 62 + .../models/BM25Transformer_MLJText/index.html | 46 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../BayesianLDA_MultivariateStats/index.html | 13 + .../index.html | 2 + .../index.html | 2 + 
.../index.html | 13 + .../index.html | 2 + .../Birch_MLJScikitLearnInterface/index.html | 2 + .../index.html | 2 + .../BorderlineSMOTE1_Imbalance/index.html | 31 + .../index.html | 7 + .../index.html | 3 + .../index.html | 11 + .../index.html | 3 + .../index.html | 2 + .../CatBoostClassifier_CatBoost/index.html | 16 + .../CatBoostRegressor_CatBoost/index.html | 15 + .../ClusterUndersampler_Imbalance/index.html | 32 + .../index.html | 2 + .../ConstantClassifier_MLJModels/index.html | 29 + .../ConstantRegressor_MLJModels/index.html | 13 + .../ContinuousEncoder_MLJModels/index.html | 38 + .../CountTransformer_MLJText/index.html | 46 + v0.20.3/models/DBSCAN_Clustering/index.html | 35 + .../DBSCAN_MLJScikitLearnInterface/index.html | 2 + .../index.html | 11 + .../DecisionTreeClassifier_BetaML/index.html | 30 + .../index.html | 31 + .../DecisionTreeRegressor_BetaML/index.html | 33 + .../index.html | 27 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../ENNUndersampler_Imbalance/index.html | 31 + .../index.html | 2 + .../index.html | 6 + .../index.html | 2 + v0.20.3/models/EpsilonSVR_LIBSVM/index.html | 25 + .../EvoLinearRegressor_EvoLinear/index.html | 3 + .../EvoSplineRegressor_EvoLinear/index.html | 3 + .../EvoTreeClassifier_EvoTrees/index.html | 15 + .../models/EvoTreeCount_EvoTrees/index.html | 18 + .../EvoTreeGaussian_EvoTrees/index.html | 17 + v0.20.3/models/EvoTreeMLE_EvoTrees/index.html | 17 + .../EvoTreeRegressor_EvoTrees/index.html | 14 + .../index.html | 2 + .../index.html | 2 + .../index.html | 11 + .../index.html | 2 + .../FeatureSelector_MLJModels/index.html | 17 + .../models/FillImputer_MLJModels/index.html | 34 + .../index.html | 13 + .../index.html | 37 + .../GaussianMixtureImputer_BetaML/index.html | 36 + .../index.html | 33 + .../index.html | 2 + .../index.html | 13 + .../index.html | 2 + .../index.html | 2 + .../models/GeneralImputer_BetaML/index.html | 60 + .../index.html | 2 + .../index.html | 2 + .../index.html | 4 + .../index.html | 2 + .../index.html | 18 + .../index.html | 2 + .../index.html | 2 + .../HuberRegressor_MLJLinearModels/index.html | 6 + .../index.html | 2 + .../models/ICA_MultivariateStats/index.html | 31 + .../index.html | 8 + .../index.html | 4 + .../models/ImageClassifier_MLJFlux/index.html | 48 + .../index.html | 32 + .../index.html | 6 + .../models/KMeansClusterer_BetaML/index.html | 32 + v0.20.3/models/KMeans_Clustering/index.html | 20 + .../KMeans_MLJScikitLearnInterface/index.html | 2 + .../models/KMeans_ParallelKMeans/index.html | 2 + .../KMedoidsClusterer_BetaML/index.html | 32 + v0.20.3/models/KMedoids_Clustering/index.html | 20 + .../index.html | 12 + .../index.html | 11 + .../index.html | 10 + .../index.html | 10 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../KernelPCA_MultivariateStats/index.html | 16 + .../index.html | 29 + .../LADRegressor_MLJLinearModels/index.html | 6 + .../models/LDA_MultivariateStats/index.html | 14 + .../models/LGBMClassifier_LightGBM/index.html | 2 + .../models/LGBMRegressor_LightGBM/index.html | 2 + .../index.html | 4 + .../index.html | 3 + .../index.html | 3 + .../index.html | 11 + .../index.html | 9 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../LassoRegressor_MLJLinearModels/index.html | 6 + .../index.html | 2 + .../LinearBinaryClassifier_GLM/index.html | 28 + .../LinearCountRegressor_GLM/index.html | 34 + v0.20.3/models/LinearRegressor_GLM/index.html 
| 18 + .../index.html | 6 + .../index.html | 2 + .../index.html | 11 + v0.20.3/models/LinearSVC_LIBSVM/index.html | 28 + .../index.html | 2 + .../index.html | 6 + .../index.html | 2 + .../index.html | 5 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 6 + .../index.html | 2 + .../index.html | 44 + .../index.html | 35 + .../index.html | 31 + .../index.html | 21 + .../index.html | 13 + .../index.html | 41 + .../index.html | 27 + .../index.html | 13 + .../index.html | 20 + .../NeuralNetworkClassifier_BetaML/index.html | 37 + .../index.html | 22 + .../NeuralNetworkRegressor_BetaML/index.html | 38 + .../NeuralNetworkRegressor_MLJFlux/index.html | 45 + v0.20.3/models/NuSVC_LIBSVM/index.html | 28 + v0.20.3/models/NuSVR_LIBSVM/index.html | 25 + .../index.html | 11 + .../OPTICS_MLJScikitLearnInterface/index.html | 2 + v0.20.3/models/OneClassSVM_LIBSVM/index.html | 67 + .../models/OneHotEncoder_MLJModels/index.html | 34 + .../OneRuleClassifier_OneRule/index.html | 30 + .../index.html | 2 + .../index.html | 2 + .../index.html | 11 + .../models/PCA_MultivariateStats/index.html | 11 + .../index.html | 2 + .../models/PPCA_MultivariateStats/index.html | 11 + .../index.html | 2 + .../index.html | 2 + .../PegasosClassifier_BetaML/index.html | 31 + .../PerceptronClassifier_BetaML/index.html | 32 + .../index.html | 2 + .../ProbabilisticNuSVC_LIBSVM/index.html | 30 + .../index.html | 2 + .../models/ProbabilisticSVC_LIBSVM/index.html | 35 + .../index.html | 6 + .../index.html | 2 + .../index.html | 2 + v0.20.3/models/ROSE_Imbalance/index.html | 30 + .../RandomForestClassifier_BetaML/index.html | 31 + .../index.html | 22 + .../index.html | 2 + .../RandomForestImputer_BetaML/index.html | 36 + .../RandomForestRegressor_BetaML/index.html | 36 + .../index.html | 13 + .../index.html | 2 + .../RandomOversampler_Imbalance/index.html | 30 + .../RandomUndersampler_Imbalance/index.html | 31 + .../index.html | 39 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../RidgeRegressor_MLJLinearModels/index.html | 6 + .../index.html | 2 + .../index.html | 11 + .../index.html | 6 + .../index.html | 2 + .../index.html | 2 + v0.20.3/models/SMOTENC_Imbalance/index.html | 39 + v0.20.3/models/SMOTEN_Imbalance/index.html | 40 + v0.20.3/models/SMOTE_Imbalance/index.html | 31 + .../index.html | 4 + .../index.html | 4 + .../SRRegressor_SymbolicRegression/index.html | 29 + v0.20.3/models/SVC_LIBSVM/index.html | 36 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 2 + .../index.html | 10 + .../models/SimpleImputer_BetaML/index.html | 29 + .../index.html | 2 + .../StableForestClassifier_SIRUS/index.html | 2 + .../StableForestRegressor_SIRUS/index.html | 2 + .../StableRulesClassifier_SIRUS/index.html | 2 + .../StableRulesRegressor_SIRUS/index.html | 2 + .../models/Standardizer_MLJModels/index.html | 37 + .../SubspaceLDA_MultivariateStats/index.html | 13 + .../models/TSVDTransformer_TSVD/index.html | 2 + .../TfidfTransformer_MLJText/index.html | 46 + .../index.html | 2 + .../TomekUndersampler_Imbalance/index.html | 28 + .../index.html | 41 + .../index.html | 33 + .../index.html | 37 + .../index.html | 2 + .../index.html | 18 + .../XGBoostClassifier_XGBoost/index.html | 2 + .../models/XGBoostCount_XGBoost/index.html | 2 + .../XGBoostRegressor_XGBoost/index.html | 2 + v0.20.3/models/dummy_file | 1 + v0.20.3/modifying_behavior/index.html | 4 + .../index.html | 26 
+ v0.20.3/objects.inv | Bin 0 -> 11326 bytes v0.20.3/openml_integration/index.html | 2 + v0.20.3/performance_measures/index.html | 8 + v0.20.3/preparing_data/index.html | 112 + .../index.html | 2 + v0.20.3/search_index.js | 3 + v0.20.3/simple_user_defined_models/index.html | 57 + v0.20.3/siteinfo.js | 1 + v0.20.3/target_transformations/index.html | 63 + v0.20.3/third_party_packages/index.html | 2 + v0.20.3/transformers/index.html | 426 +++ v0.20.3/tuning_models/index.html | 338 ++ v0.20.3/weights/index.html | 12 + .../working_with_categorical_data/index.html | 110 + versions.js | 2 +- 314 files changed, 10562 insertions(+), 3 deletions(-) create mode 100644 v0.20.3/.documenter-siteinfo.json create mode 100644 v0.20.3/about_mlj/index.html create mode 100644 v0.20.3/acceleration_and_parallelism/index.html create mode 100644 v0.20.3/adding_models_for_general_use/index.html create mode 100644 v0.20.3/api/index.html create mode 100644 v0.20.3/assets/documenter.js create mode 100644 v0.20.3/assets/logo.svg create mode 100644 v0.20.3/assets/themes/documenter-dark.css create mode 100644 v0.20.3/assets/themes/documenter-light.css create mode 100644 v0.20.3/assets/themeswap.js create mode 100644 v0.20.3/assets/warner.js create mode 100644 v0.20.3/benchmarking/index.html create mode 100644 v0.20.3/common_mlj_workflows/index.html create mode 100644 v0.20.3/composing_models/index.html create mode 100644 v0.20.3/controlling_iterative_models/index.html create mode 100644 v0.20.3/correcting_class_imbalance/index.html create mode 100644 v0.20.3/evaluating_model_performance/index.html create mode 100644 v0.20.3/frequently_asked_questions/index.html create mode 100644 v0.20.3/generating_synthetic_data.ipynb create mode 100644 v0.20.3/generating_synthetic_data/index.html create mode 100644 v0.20.3/getting_started/index.html create mode 100644 v0.20.3/glossary/index.html create mode 100644 v0.20.3/homogeneous_ensembles/index.html create mode 100644 v0.20.3/img/MLJLogo2.svg create mode 100644 v0.20.3/img/MLPackages.png create mode 100644 v0.20.3/img/heatmap.png create mode 100644 v0.20.3/img/latin_hypercube_tuning_plot.png create mode 100644 v0.20.3/img/learning_curve42.png create mode 100644 v0.20.3/img/learning_curve_n.png create mode 100644 v0.20.3/img/learningcurves.png create mode 100644 v0.20.3/img/output_11_0.svg create mode 100644 v0.20.3/img/output_4_0.svg create mode 100644 v0.20.3/img/output_5_0.svg create mode 100644 v0.20.3/img/output_8_0.svg create mode 100644 v0.20.3/img/random_search_tuning_plot.png create mode 100644 v0.20.3/img/scitypes.png create mode 100644 v0.20.3/img/scitypes_small.png create mode 100644 v0.20.3/img/simple.drawio create mode 100644 v0.20.3/img/simple.png create mode 100644 v0.20.3/img/simple.svg create mode 100644 v0.20.3/img/target_transformer.drawio create mode 100644 v0.20.3/img/target_transformer.png create mode 100644 v0.20.3/img/target_transformer2.drawio create mode 100644 v0.20.3/img/target_transformer2.png create mode 100644 v0.20.3/img/tuning_plot.png create mode 100755 v0.20.3/img/two_model_stack.png create mode 100644 v0.20.3/img/workflows_learning_curve.png create mode 100644 v0.20.3/img/workflows_learning_curves.png create mode 100644 v0.20.3/img/workflows_tuning_plot.png create mode 100755 v0.20.3/img/wrapped_ridge.png create mode 100644 v0.20.3/index.html create mode 100644 v0.20.3/internals/index.html create mode 100644 v0.20.3/learning_curves/index.html create mode 100644 v0.20.3/learning_mlj/index.html create mode 100644 
v0.20.3/learning_networks/index.html create mode 100644 v0.20.3/linear_pipelines/index.html create mode 100644 v0.20.3/list_of_supported_models/index.html create mode 100644 v0.20.3/loading_model_code/index.html create mode 100644 v0.20.3/logging_workflows/index.html create mode 100644 v0.20.3/machines/index.html create mode 100644 v0.20.3/mlj_cheatsheet/index.html create mode 100644 v0.20.3/model_browser/index.html create mode 100644 v0.20.3/model_search/index.html create mode 100644 v0.20.3/model_stacking/index.html create mode 100644 v0.20.3/models/ABODDetector_OutlierDetectionNeighbors/index.html create mode 100644 v0.20.3/models/ABODDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/ARDRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/AdaBoostClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/AdaBoostRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/AdaBoostStumpClassifier_DecisionTree/index.html create mode 100644 v0.20.3/models/AffinityPropagation_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/AgglomerativeClustering_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/AutoEncoder_BetaML/index.html create mode 100644 v0.20.3/models/BM25Transformer_MLJText/index.html create mode 100644 v0.20.3/models/BaggingClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/BaggingRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/BayesianLDA_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/BayesianLDA_MultivariateStats/index.html create mode 100644 v0.20.3/models/BayesianQDA_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/BayesianRidgeRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/BayesianSubspaceLDA_MultivariateStats/index.html create mode 100644 v0.20.3/models/BernoulliNBClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/Birch_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/BisectingKMeans_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/BorderlineSMOTE1_Imbalance/index.html create mode 100644 v0.20.3/models/CBLOFDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/CDDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/COFDetector_OutlierDetectionNeighbors/index.html create mode 100644 v0.20.3/models/COFDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/COPODDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/CatBoostClassifier_CatBoost/index.html create mode 100644 v0.20.3/models/CatBoostRegressor_CatBoost/index.html create mode 100644 v0.20.3/models/ClusterUndersampler_Imbalance/index.html create mode 100644 v0.20.3/models/ComplementNBClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/ConstantClassifier_MLJModels/index.html create mode 100644 v0.20.3/models/ConstantRegressor_MLJModels/index.html create mode 100644 v0.20.3/models/ContinuousEncoder_MLJModels/index.html create mode 100644 v0.20.3/models/CountTransformer_MLJText/index.html create mode 100644 v0.20.3/models/DBSCAN_Clustering/index.html create mode 100644 v0.20.3/models/DBSCAN_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/DNNDetector_OutlierDetectionNeighbors/index.html create mode 100644 
v0.20.3/models/DecisionTreeClassifier_BetaML/index.html create mode 100644 v0.20.3/models/DecisionTreeClassifier_DecisionTree/index.html create mode 100644 v0.20.3/models/DecisionTreeRegressor_BetaML/index.html create mode 100644 v0.20.3/models/DecisionTreeRegressor_DecisionTree/index.html create mode 100644 v0.20.3/models/DeterministicConstantClassifier_MLJModels/index.html create mode 100644 v0.20.3/models/DeterministicConstantRegressor_MLJModels/index.html create mode 100644 v0.20.3/models/DummyClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/DummyRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/ECODDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/ENNUndersampler_Imbalance/index.html create mode 100644 v0.20.3/models/ElasticNetCVRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/ElasticNetRegressor_MLJLinearModels/index.html create mode 100644 v0.20.3/models/ElasticNetRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/EpsilonSVR_LIBSVM/index.html create mode 100644 v0.20.3/models/EvoLinearRegressor_EvoLinear/index.html create mode 100644 v0.20.3/models/EvoSplineRegressor_EvoLinear/index.html create mode 100644 v0.20.3/models/EvoTreeClassifier_EvoTrees/index.html create mode 100644 v0.20.3/models/EvoTreeCount_EvoTrees/index.html create mode 100644 v0.20.3/models/EvoTreeGaussian_EvoTrees/index.html create mode 100644 v0.20.3/models/EvoTreeMLE_EvoTrees/index.html create mode 100644 v0.20.3/models/EvoTreeRegressor_EvoTrees/index.html create mode 100644 v0.20.3/models/ExtraTreesClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/ExtraTreesRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/FactorAnalysis_MultivariateStats/index.html create mode 100644 v0.20.3/models/FeatureAgglomeration_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/FeatureSelector_MLJModels/index.html create mode 100644 v0.20.3/models/FillImputer_MLJModels/index.html create mode 100644 v0.20.3/models/GMMDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/GaussianMixtureClusterer_BetaML/index.html create mode 100644 v0.20.3/models/GaussianMixtureImputer_BetaML/index.html create mode 100644 v0.20.3/models/GaussianMixtureRegressor_BetaML/index.html create mode 100644 v0.20.3/models/GaussianNBClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/GaussianNBClassifier_NaiveBayes/index.html create mode 100644 v0.20.3/models/GaussianProcessClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/GaussianProcessRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/GeneralImputer_BetaML/index.html create mode 100644 v0.20.3/models/GradientBoostingClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/GradientBoostingRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/HBOSDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/HDBSCAN_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/HierarchicalClustering_Clustering/index.html create mode 100644 v0.20.3/models/HistGradientBoostingClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/HistGradientBoostingRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/HuberRegressor_MLJLinearModels/index.html create mode 100644 
v0.20.3/models/HuberRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/ICA_MultivariateStats/index.html create mode 100644 v0.20.3/models/IForestDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/INNEDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/ImageClassifier_MLJFlux/index.html create mode 100644 v0.20.3/models/InteractionTransformer_MLJModels/index.html create mode 100644 v0.20.3/models/KDEDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/KMeansClusterer_BetaML/index.html create mode 100644 v0.20.3/models/KMeans_Clustering/index.html create mode 100644 v0.20.3/models/KMeans_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/KMeans_ParallelKMeans/index.html create mode 100644 v0.20.3/models/KMedoidsClusterer_BetaML/index.html create mode 100644 v0.20.3/models/KMedoids_Clustering/index.html create mode 100644 v0.20.3/models/KNNClassifier_NearestNeighborModels/index.html create mode 100644 v0.20.3/models/KNNDetector_OutlierDetectionNeighbors/index.html create mode 100644 v0.20.3/models/KNNDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/KNNRegressor_NearestNeighborModels/index.html create mode 100644 v0.20.3/models/KNeighborsClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/KNeighborsRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/KPLSRegressor_PartialLeastSquaresRegressor/index.html create mode 100644 v0.20.3/models/KernelPCA_MultivariateStats/index.html create mode 100644 v0.20.3/models/KernelPerceptronClassifier_BetaML/index.html create mode 100644 v0.20.3/models/LADRegressor_MLJLinearModels/index.html create mode 100644 v0.20.3/models/LDA_MultivariateStats/index.html create mode 100644 v0.20.3/models/LGBMClassifier_LightGBM/index.html create mode 100644 v0.20.3/models/LGBMRegressor_LightGBM/index.html create mode 100644 v0.20.3/models/LMDDDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/LOCIDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/LODADetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/LOFDetector_OutlierDetectionNeighbors/index.html create mode 100644 v0.20.3/models/LOFDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/LarsCVRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/LarsRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/LassoCVRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/LassoLarsCVRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/LassoLarsICRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/LassoLarsRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/LassoRegressor_MLJLinearModels/index.html create mode 100644 v0.20.3/models/LassoRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/LinearBinaryClassifier_GLM/index.html create mode 100644 v0.20.3/models/LinearCountRegressor_GLM/index.html create mode 100644 v0.20.3/models/LinearRegressor_GLM/index.html create mode 100644 v0.20.3/models/LinearRegressor_MLJLinearModels/index.html create mode 100644 v0.20.3/models/LinearRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/LinearRegressor_MultivariateStats/index.html create mode 100644 v0.20.3/models/LinearSVC_LIBSVM/index.html 
create mode 100644 v0.20.3/models/LogisticCVClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/LogisticClassifier_MLJLinearModels/index.html create mode 100644 v0.20.3/models/LogisticClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/MCDDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/MeanShift_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/MiniBatchKMeans_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/MultiTaskLassoRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/MultinomialClassifier_MLJLinearModels/index.html create mode 100644 v0.20.3/models/MultinomialNBClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/MultinomialNBClassifier_NaiveBayes/index.html create mode 100644 v0.20.3/models/MultitargetGaussianMixtureRegressor_BetaML/index.html create mode 100644 v0.20.3/models/MultitargetKNNClassifier_NearestNeighborModels/index.html create mode 100644 v0.20.3/models/MultitargetKNNRegressor_NearestNeighborModels/index.html create mode 100644 v0.20.3/models/MultitargetLinearRegressor_MultivariateStats/index.html create mode 100644 v0.20.3/models/MultitargetNeuralNetworkRegressor_BetaML/index.html create mode 100644 v0.20.3/models/MultitargetNeuralNetworkRegressor_MLJFlux/index.html create mode 100644 v0.20.3/models/MultitargetRidgeRegressor_MultivariateStats/index.html create mode 100644 v0.20.3/models/MultitargetSRRegressor_SymbolicRegression/index.html create mode 100644 v0.20.3/models/NeuralNetworkClassifier_BetaML/index.html create mode 100644 v0.20.3/models/NeuralNetworkClassifier_MLJFlux/index.html create mode 100644 v0.20.3/models/NeuralNetworkRegressor_BetaML/index.html create mode 100644 v0.20.3/models/NeuralNetworkRegressor_MLJFlux/index.html create mode 100644 v0.20.3/models/NuSVC_LIBSVM/index.html create mode 100644 v0.20.3/models/NuSVR_LIBSVM/index.html create mode 100644 v0.20.3/models/OCSVMDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/OPTICS_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/OneClassSVM_LIBSVM/index.html create mode 100644 v0.20.3/models/OneHotEncoder_MLJModels/index.html create mode 100644 v0.20.3/models/OneRuleClassifier_OneRule/index.html create mode 100644 v0.20.3/models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/PCADetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/PCA_MultivariateStats/index.html create mode 100644 v0.20.3/models/PLSRegressor_PartialLeastSquaresRegressor/index.html create mode 100644 v0.20.3/models/PPCA_MultivariateStats/index.html create mode 100644 v0.20.3/models/PassiveAggressiveClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/PassiveAggressiveRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/PegasosClassifier_BetaML/index.html create mode 100644 v0.20.3/models/PerceptronClassifier_BetaML/index.html create mode 100644 
v0.20.3/models/PerceptronClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/ProbabilisticNuSVC_LIBSVM/index.html create mode 100644 v0.20.3/models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/ProbabilisticSVC_LIBSVM/index.html create mode 100644 v0.20.3/models/QuantileRegressor_MLJLinearModels/index.html create mode 100644 v0.20.3/models/RANSACRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/RODDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/ROSE_Imbalance/index.html create mode 100644 v0.20.3/models/RandomForestClassifier_BetaML/index.html create mode 100644 v0.20.3/models/RandomForestClassifier_DecisionTree/index.html create mode 100644 v0.20.3/models/RandomForestClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/RandomForestImputer_BetaML/index.html create mode 100644 v0.20.3/models/RandomForestRegressor_BetaML/index.html create mode 100644 v0.20.3/models/RandomForestRegressor_DecisionTree/index.html create mode 100644 v0.20.3/models/RandomForestRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/RandomOversampler_Imbalance/index.html create mode 100644 v0.20.3/models/RandomUndersampler_Imbalance/index.html create mode 100644 v0.20.3/models/RandomWalkOversampler_Imbalance/index.html create mode 100644 v0.20.3/models/RidgeCVClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/RidgeCVRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/RidgeClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/RidgeRegressor_MLJLinearModels/index.html create mode 100644 v0.20.3/models/RidgeRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/RidgeRegressor_MultivariateStats/index.html create mode 100644 v0.20.3/models/RobustRegressor_MLJLinearModels/index.html create mode 100644 v0.20.3/models/SGDClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/SGDRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/SMOTENC_Imbalance/index.html create mode 100644 v0.20.3/models/SMOTEN_Imbalance/index.html create mode 100644 v0.20.3/models/SMOTE_Imbalance/index.html create mode 100644 v0.20.3/models/SODDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/SOSDetector_OutlierDetectionPython/index.html create mode 100644 v0.20.3/models/SRRegressor_SymbolicRegression/index.html create mode 100644 v0.20.3/models/SVC_LIBSVM/index.html create mode 100644 v0.20.3/models/SVMClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/SVMLinearClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/SVMLinearRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/SVMNuClassifier_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/SVMNuRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/SVMRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/SelfOrganizingMap_SelfOrganizingMaps/index.html create mode 100644 v0.20.3/models/SimpleImputer_BetaML/index.html create mode 100644 v0.20.3/models/SpectralClustering_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/StableForestClassifier_SIRUS/index.html create mode 100644 v0.20.3/models/StableForestRegressor_SIRUS/index.html create mode 100644 
v0.20.3/models/StableRulesClassifier_SIRUS/index.html create mode 100644 v0.20.3/models/StableRulesRegressor_SIRUS/index.html create mode 100644 v0.20.3/models/Standardizer_MLJModels/index.html create mode 100644 v0.20.3/models/SubspaceLDA_MultivariateStats/index.html create mode 100644 v0.20.3/models/TSVDTransformer_TSVD/index.html create mode 100644 v0.20.3/models/TfidfTransformer_MLJText/index.html create mode 100644 v0.20.3/models/TheilSenRegressor_MLJScikitLearnInterface/index.html create mode 100644 v0.20.3/models/TomekUndersampler_Imbalance/index.html create mode 100644 v0.20.3/models/UnivariateBoxCoxTransformer_MLJModels/index.html create mode 100644 v0.20.3/models/UnivariateDiscretizer_MLJModels/index.html create mode 100644 v0.20.3/models/UnivariateFillImputer_MLJModels/index.html create mode 100644 v0.20.3/models/UnivariateStandardizer_MLJModels/index.html create mode 100644 v0.20.3/models/UnivariateTimeTypeToContinuous_MLJModels/index.html create mode 100644 v0.20.3/models/XGBoostClassifier_XGBoost/index.html create mode 100644 v0.20.3/models/XGBoostCount_XGBoost/index.html create mode 100644 v0.20.3/models/XGBoostRegressor_XGBoost/index.html create mode 100644 v0.20.3/models/dummy_file create mode 100644 v0.20.3/modifying_behavior/index.html create mode 100644 v0.20.3/more_on_probabilistic_predictors/index.html create mode 100644 v0.20.3/objects.inv create mode 100644 v0.20.3/openml_integration/index.html create mode 100644 v0.20.3/performance_measures/index.html create mode 100644 v0.20.3/preparing_data/index.html create mode 100644 v0.20.3/quick_start_guide_to_adding_models/index.html create mode 100644 v0.20.3/search_index.js create mode 100644 v0.20.3/simple_user_defined_models/index.html create mode 100644 v0.20.3/siteinfo.js create mode 100644 v0.20.3/target_transformations/index.html create mode 100644 v0.20.3/third_party_packages/index.html create mode 100644 v0.20.3/transformers/index.html create mode 100644 v0.20.3/tuning_models/index.html create mode 100644 v0.20.3/weights/index.html create mode 100644 v0.20.3/working_with_categorical_data/index.html diff --git a/stable b/stable index 014ec6192..4646fd924 120000 --- a/stable +++ b/stable @@ -1 +1 @@ -v0.20.2 \ No newline at end of file +v0.20.3 \ No newline at end of file diff --git a/v0.20 b/v0.20 index 014ec6192..4646fd924 120000 --- a/v0.20 +++ b/v0.20 @@ -1 +1 @@ -v0.20.2 \ No newline at end of file +v0.20.3 \ No newline at end of file diff --git a/v0.20.3/.documenter-siteinfo.json b/v0.20.3/.documenter-siteinfo.json new file mode 100644 index 000000000..684ccf635 --- /dev/null +++ b/v0.20.3/.documenter-siteinfo.json @@ -0,0 +1 @@ +{"documenter":{"julia_version":"1.10.2","generation_timestamp":"2024-03-08T06:58:42","documenter_version":"1.3.0"}} \ No newline at end of file diff --git a/v0.20.3/about_mlj/index.html b/v0.20.3/about_mlj/index.html new file mode 100644 index 000000000..fa85cecf0 --- /dev/null +++ b/v0.20.3/about_mlj/index.html @@ -0,0 +1,57 @@ + +About MLJ · MLJ

About MLJ

MLJ (Machine Learning in Julia) is a toolbox written in Julia providing a common interface and meta-algorithms for selecting, tuning, evaluating, composing and comparing over 180 machine learning models written in Julia and other languages. In particular, MLJ wraps a large number of scikit-learn models.

MLJ is released under the MIT license.

Lightning tour

For help learning to use MLJ, see Learning MLJ.

A self-contained notebook and Julia script of this demonstration are also available here.

The first code snippet below creates a new Julia environment MLJ_tour and installs just those packages needed for the tour. See Installation for more on creating a Julia environment for use with MLJ.

Julia installation instructions are here.

using Pkg
Pkg.activate("MLJ_tour", shared=true)
Pkg.add("MLJ")
Pkg.add("MLJIteration")
Pkg.add("EvoTrees")

In MLJ a model is just a container for hyper-parameters, and that's all. Here we will apply several kinds of model composition before binding the resulting "meta-model" to data in a machine for evaluation using cross-validation.

Loading and instantiating a gradient tree-boosting model:

using MLJ
Booster = @load EvoTreeRegressor # loads code defining a model type
booster = Booster(max_depth=2)   # specify hyper-parameter at construction
booster.nrounds=50               # or mutate afterwards

This model is an example of an iterative model. As it stands, the number of iterations nrounds is fixed.

Composition 1: Wrapping the model to make it "self-iterating"

Let's create a new model that automatically learns the number of iterations, using the NumberSinceBest(3) criterion, as applied to an out-of-sample l1 loss:

using MLJIteration
iterated_booster = IteratedModel(model=booster,
                                 resampling=Holdout(fraction_train=0.8),
                                 controls=[Step(2), NumberSinceBest(3), NumberLimit(300)],
                                 measure=l1,
                                 retrain=true)

Composition 2: Preprocess the input features

Combining the model with categorical feature encoding:

pipe = ContinuousEncoder() |> iterated_booster
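
As an aside, the pipeline just constructed is itself a model whose components appear as nested hyper-parameters; the field name deterministic_iterated_model used in the range definition below refers to the wrapped iterated booster. A minimal sketch for inspecting these nested hyper-parameters (assuming the objects above have been defined):

julia> pipe                                               # display the pipeline and its nested hyper-parameters
julia> pipe.deterministic_iterated_model.model.max_depth  # access one of them directly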

Composition 3: Wrapping the model to make it "self-tuning"

First, we define a hyper-parameter range for optimization of a (nested) hyper-parameter:

max_depth_range = range(pipe,
                        :(deterministic_iterated_model.model.max_depth),
                        lower = 1,
                        upper = 10)

Now we can wrap the pipeline model in an optimization strategy to make it "self-tuning":

self_tuning_pipe = TunedModel(model=pipe,
                              tuning=RandomSearch(),
                              ranges = max_depth_range,
                              resampling=CV(nfolds=3, rng=456),
                              measure=l1,
                              acceleration=CPUThreads(),
                              n=50)

Binding to data and evaluating performance

Loading a selection of features and labels from the Ames House Price dataset:

X, y = @load_reduced_ames;
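
Before evaluating, it can be helpful to confirm that the data has the expected scientific types. A minimal sketch using MLJ's schema and scitype utilities:

schema(X)   # tabular summary of the column scientific types
scitype(y)  # scientific type of the target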

Evaluating the "self-tuning" pipeline model's performance using 5-fold cross-validation (implies multiple layers of nested resampling):

julia> evaluate(self_tuning_pipe, X, y,
                measures=[l1, l2],
                resampling=CV(nfolds=5, rng=123),
                acceleration=CPUThreads(),
                verbosity=2)
PerformanceEvaluation object with these fields:
  measure, measurement, operation, per_fold,
  per_observation, fitted_params_per_fold,
  report_per_fold, train_test_pairs
Extract:
┌───────────────┬─────────────┬───────────┬───────────────────────────────────────────────┐
│ measure       │ measurement │ operation │ per_fold                                      │
├───────────────┼─────────────┼───────────┼───────────────────────────────────────────────┤
│ LPLoss(p = 1) │ 17200.0     │ predict   │ [16500.0, 17100.0, 16300.0, 17500.0, 18900.0] │
│ LPLoss(p = 2) │ 6.83e8      │ predict   │ [6.14e8, 6.64e8, 5.98e8, 6.37e8, 9.03e8]      │
└───────────────┴─────────────┴───────────┴───────────────────────────────────────────────┘
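
The fields displayed above can also be accessed programmatically. A minimal sketch, assuming the evaluation is captured in a variable rather than only displayed:

e = evaluate(self_tuning_pipe, X, y,
             measures=[l1, l2],
             resampling=CV(nfolds=5, rng=123),
             acceleration=CPUThreads())
e.measurement[1]  # aggregated l1 loss
e.per_fold[1]     # l1 loss on each of the five folds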

Key goals

  • Offer a consistent way to use, compose and tune machine learning models in Julia,

  • Promote the improvement of the Julia ML/Stats ecosystem by making it easier to use models from a wide range of packages,

  • Unlock performance gains by exploiting Julia's support for parallelism, automatic differentiation, GPU, optimization etc.

Key features

  • Data agnostic: train most models on any data X supported by the Tables.jl interface (needs Tables.istable(X) == true).

  • Extensive, state-of-the-art, support for model composition (pipelines, stacks and, more generally, learning networks). See more below.

  • Convenient syntax to tune and evaluate (composite) models.

  • Consistent interface to handle probabilistic predictions.

  • Extensible tuning interface, to support a growing number of optimization strategies, and designed to play well with model composition.

  • Options to accelerate model evaluation and tuning with multithreading and/or distributed processing.

Model composability

The generic model composition APIs provided by other toolboxes we have surveyed share one or more of the following shortcomings, which do not exist in MLJ:

  • Composite models do not inherit all the behavior of ordinary models.

  • Composition is limited to linear (non-branching) pipelines.

  • Supervised components in a linear pipeline can only occur at the end of the pipeline.

  • Only static (unlearned) target transformations/inverse transformations are supported.

  • Hyper-parameters in homogeneous model ensembles cannot be coupled.

  • Model stacking, with out-of-sample predictions for base learners, cannot be implemented (using the generic API alone).

  • Hyper-parameters and/or learned parameters of component models are not easily inspected or manipulated (by tuning algorithms, for example).

  • Composite models cannot implement multiple operations, for example, both a predict and transform method (as in clustering models) or both a transform and inverse_transform method.
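
To give one concrete illustration of the learned target transformations mentioned above, MLJ provides the TransformedTargetModel wrapper. A minimal sketch, re-using the booster defined earlier and standardizing the target before training (predictions are automatically inverse-transformed):

transformed_booster = TransformedTargetModel(booster, transformer=Standardizer())

The result is just another model, which can be bound to data in a machine, evaluated, or tuned like any other.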

Some of these features are demonstrated in this notebook.

For more information see the MLJ design paper or our detailed paper on the composition interface.

Getting help and reporting problems

Users are encouraged to provide feedback on their experience using MLJ and to report issues.

For a query to have maximum exposure to maintainers and users, start a discussion thread at Julia Discourse Machine Learning and tag it "mlj". Queries can also be posted as issues, or on the #mlj channel in the Julia Slack workspace.

Bugs, suggestions, and feature requests can be posted here.

Users are also welcome to join the #mlj channel on the Julia Slack to ask questions and make suggestions.

Installation

Initially, it is recommended that MLJ and associated packages be installed in a new environment to avoid package conflicts. You can do this with

julia> using Pkg; Pkg.activate("my_MLJ_env", shared=true)

Installing MLJ is also done with the package manager:

julia> Pkg.add("MLJ")

Optional: To test your installation, run

julia> Pkg.test("MLJ")

It is important to note that MLJ is essentially a big wrapper providing unified access to model-providing packages. For this reason, you generally need to add further packages to your environment to make model-specific code available. This happens automatically when you use MLJ's interactive load command @iload, as in

julia> Tree = @iload DecisionTreeClassifier # load type
julia> tree = Tree() # instance

where you will also be asked to choose a providing package, as more than one package provides a DecisionTreeClassifier model. For more on identifying the name of an applicable model, see Model Search. For non-interactive loading of code (e.g., from a module or function), see Loading Model Code.
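
For non-interactive loading, the pkg keyword argument of @load can be used to disambiguate when more than one package provides a model with the given name. A minimal sketch, assuming DecisionTree.jl has been added to your environment:

Tree = @load DecisionTreeClassifier pkg=DecisionTree
tree = Tree(max_depth=3)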

It is recommended that you start with models from more mature packages such as DecisionTree.jl, ScikitLearn.jl or XGBoost.jl.

MLJ is supported by several satellite packages (MLJTuning, MLJModelInterface, etc.), which the general user is not required to install directly. Developers can learn more about these here.

See also the alternative installation instructions for Modifying Behavior.

Funding

MLJ was initially created as a Tools, Practices and Systems project at the Alan Turing Institute in 2019. Current funding is provided by a New Zealand Strategic Science Investment Fund awarded to the University of Auckland.

Citing MLJ

An overview of MLJ design:

DOI

@article{Blaom2020,
  doi = {10.21105/joss.02704},
  url = {https://doi.org/10.21105/joss.02704},
  year = {2020},
  publisher = {The Open Journal},
  volume = {5},
  number = {55},
  pages = {2704},
  author = {Anthony D. Blaom and Franz Kiraly and Thibaut Lienart and Yiannis Simillides and Diego Arenas and Sebastian J. Vollmer},
  title = {{MLJ}: A Julia package for composable machine learning},
  journal = {Journal of Open Source Software}
}

An in-depth view of MLJ's model composition design:

arXiv

@misc{blaom2020flexible,
      title={Flexible model composition in machine learning and its implementation in {MLJ}},
      author={Anthony D. Blaom and Sebastian J. Vollmer},
      year={2020},
      eprint={2012.15505},
      archivePrefix={arXiv},
      primaryClass={cs.LG}
}
diff --git a/v0.20.3/acceleration_and_parallelism/index.html b/v0.20.3/acceleration_and_parallelism/index.html new file mode 100644 index 000000000..5ce80a7e8 --- /dev/null +++ b/v0.20.3/acceleration_and_parallelism/index.html @@ -0,0 +1,2 @@ + +Acceleration and Parallelism · MLJ

Acceleration and Parallelism

User-facing interface

To enable composable, extensible acceleration of core MLJ methods, MLJ uses ComputationalResources.jl, which provides basic types and functions that make implementing acceleration easy. Ambitious users or package authors also have the option to define their own types to be passed as acceleration resources, which must be <:ComputationalResources.AbstractResource.

Methods that support some form of acceleration accept the acceleration keyword argument, which can be passed a "resource" from ComputationalResources. For example, passing acceleration=CPUProcesses() uses Distributed's multiprocessing functionality to accelerate the computation, while acceleration=CPUThreads() uses Julia's PARTR threading model.

The default computational resource is CPU1(), which is simply serial processing on the CPU. The default resource can be changed as in this example: MLJ.default_resource(CPUProcesses()). The argument must always have type <:ComputationalResources.AbstractResource. To inspect the current default, use MLJ.default_resource().
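
As a minimal sketch, here is how one might inspect and change the default resource, and override it for a single call to evaluate (the names model, X and y are placeholders for a model and data already defined):

MLJ.default_resource()                # inspect the current default, CPU1() unless changed
MLJ.default_resource(CPUProcesses())  # make multiprocessing the new default
evaluate(model, X, y,
         resampling=CV(nfolds=6),
         measure=l2,
         acceleration=CPUThreads())   # override the default for this call only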

Note

You cannot use CPUThreads() with models wrapping Python code.

diff --git a/v0.20.3/adding_models_for_general_use/index.html b/v0.20.3/adding_models_for_general_use/index.html new file mode 100644 index 000000000..936d174be --- /dev/null +++ b/v0.20.3/adding_models_for_general_use/index.html @@ -0,0 +1,2 @@ + +Adding Models for General Use · MLJ
diff --git a/v0.20.3/api/index.html b/v0.20.3/api/index.html new file mode 100644 index 000000000..9226d105f --- /dev/null +++ b/v0.20.3/api/index.html @@ -0,0 +1,2 @@ + +Index of Methods · MLJ

Index of Methods

diff --git a/v0.20.3/assets/documenter.js b/v0.20.3/assets/documenter.js new file mode 100644 index 000000000..c6562b558 --- /dev/null +++ b/v0.20.3/assets/documenter.js @@ -0,0 +1,1050 @@ +// Generated by Documenter.jl +requirejs.config({ + paths: { + 'highlight-julia': 'https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.8.0/languages/julia.min', + 'headroom': 'https://cdnjs.cloudflare.com/ajax/libs/headroom/0.12.0/headroom.min', + 'jqueryui': 'https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.13.2/jquery-ui.min', + 'katex-auto-render': 'https://cdnjs.cloudflare.com/ajax/libs/KaTeX/0.16.8/contrib/auto-render.min', + 'jquery': 'https://cdnjs.cloudflare.com/ajax/libs/jquery/3.7.0/jquery.min', + 'headroom-jquery': 'https://cdnjs.cloudflare.com/ajax/libs/headroom/0.12.0/jQuery.headroom.min', + 'katex': 'https://cdnjs.cloudflare.com/ajax/libs/KaTeX/0.16.8/katex.min', + 'highlight': 'https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.8.0/highlight.min', + 'highlight-julia-repl': 'https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.8.0/languages/julia-repl.min', + }, + shim: { + "highlight-julia": { + "deps": [ + "highlight" + ] + }, + "katex-auto-render": { + "deps": [ + "katex" + ] + }, + "headroom-jquery": { + "deps": [ + "jquery", + "headroom" + ] + }, + "highlight-julia-repl": { + "deps": [ + "highlight" + ] + } +} +}); +//////////////////////////////////////////////////////////////////////////////// +require(['jquery', 'katex', 'katex-auto-render'], function($, katex, renderMathInElement) { +$(document).ready(function() { + renderMathInElement( + document.body, + { + "delimiters": [ + { + "left": "$", + "right": "$", + "display": false + }, + { + "left": "$$", + "right": "$$", + "display": true + }, + { + "left": "\\[", + "right": "\\]", + "display": true + } + ] +} + + ); +}) + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery', 'highlight', 'highlight-julia', 'highlight-julia-repl'], function($) { +$(document).ready(function() { + hljs.highlightAll(); +}) + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery'], function($) { + +let timer = 0; +var isExpanded = true; + +$(document).on("click", ".docstring header", function () { + let articleToggleTitle = "Expand docstring"; + + debounce(() => { + if ($(this).siblings("section").is(":visible")) { + $(this) + .find(".docstring-article-toggle-button") + .removeClass("fa-chevron-down") + .addClass("fa-chevron-right"); + } else { + $(this) + .find(".docstring-article-toggle-button") + .removeClass("fa-chevron-right") + .addClass("fa-chevron-down"); + + articleToggleTitle = "Collapse docstring"; + } + + $(this) + .find(".docstring-article-toggle-button") + .prop("title", articleToggleTitle); + $(this).siblings("section").slideToggle(); + }); +}); + +$(document).on("click", ".docs-article-toggle-button", function (event) { + let articleToggleTitle = "Expand docstring"; + let navArticleToggleTitle = "Expand all docstrings"; + let animationSpeed = event.noToggleAnimation ? 
0 : 400; + + debounce(() => { + if (isExpanded) { + $(this).removeClass("fa-chevron-up").addClass("fa-chevron-down"); + $(".docstring-article-toggle-button") + .removeClass("fa-chevron-down") + .addClass("fa-chevron-right"); + + isExpanded = false; + + $(".docstring section").slideUp(animationSpeed); + } else { + $(this).removeClass("fa-chevron-down").addClass("fa-chevron-up"); + $(".docstring-article-toggle-button") + .removeClass("fa-chevron-right") + .addClass("fa-chevron-down"); + + isExpanded = true; + articleToggleTitle = "Collapse docstring"; + navArticleToggleTitle = "Collapse all docstrings"; + + $(".docstring section").slideDown(animationSpeed); + } + + $(this).prop("title", navArticleToggleTitle); + $(".docstring-article-toggle-button").prop("title", articleToggleTitle); + }); +}); + +function debounce(callback, timeout = 300) { + if (Date.now() - timer > timeout) { + callback(); + } + + clearTimeout(timer); + + timer = Date.now(); +} + +}) +//////////////////////////////////////////////////////////////////////////////// +require([], function() { +function addCopyButtonCallbacks() { + for (const el of document.getElementsByTagName("pre")) { + const button = document.createElement("button"); + button.classList.add("copy-button", "fa-solid", "fa-copy"); + button.setAttribute("aria-label", "Copy this code block"); + button.setAttribute("title", "Copy"); + + el.appendChild(button); + + const success = function () { + button.classList.add("success", "fa-check"); + button.classList.remove("fa-copy"); + }; + + const failure = function () { + button.classList.add("error", "fa-xmark"); + button.classList.remove("fa-copy"); + }; + + button.addEventListener("click", function () { + copyToClipboard(el.innerText).then(success, failure); + + setTimeout(function () { + button.classList.add("fa-copy"); + button.classList.remove("success", "fa-check", "fa-xmark"); + }, 5000); + }); + } +} + +function copyToClipboard(text) { + // clipboard API is only available in secure contexts + if (window.navigator && window.navigator.clipboard) { + return window.navigator.clipboard.writeText(text); + } else { + return new Promise(function (resolve, reject) { + try { + const el = document.createElement("textarea"); + el.textContent = text; + el.style.position = "fixed"; + el.style.opacity = 0; + document.body.appendChild(el); + el.select(); + document.execCommand("copy"); + + resolve(); + } catch (err) { + reject(err); + } finally { + document.body.removeChild(el); + } + }); + } +} + +if (document.readyState === "loading") { + document.addEventListener("DOMContentLoaded", addCopyButtonCallbacks); +} else { + addCopyButtonCallbacks(); +} + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery', 'headroom', 'headroom-jquery'], function($, Headroom) { + +// Manages the top navigation bar (hides it when the user starts scrolling down on the +// mobile). +window.Headroom = Headroom; // work around buggy module loading? 
+$(document).ready(function () { + $("#documenter .docs-navbar").headroom({ + tolerance: { up: 10, down: 10 }, + }); +}); + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery'], function($) { + +$(document).ready(function () { + let meta = $("div[data-docstringscollapsed]").data(); + + if (meta?.docstringscollapsed) { + $("#documenter-article-toggle-button").trigger({ + type: "click", + noToggleAnimation: true, + }); + } +}); + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery'], function($) { + +/* +To get an in-depth about the thought process you can refer: https://hetarth02.hashnode.dev/series/gsoc + +PSEUDOCODE: + +Searching happens automatically as the user types or adjusts the selected filters. +To preserve responsiveness, as much as possible of the slow parts of the search are done +in a web worker. Searching and result generation are done in the worker, and filtering and +DOM updates are done in the main thread. The filters are in the main thread as they should +be very quick to apply. This lets filters be changed without re-searching with minisearch +(which is possible even if filtering is on the worker thread) and also lets filters be +changed _while_ the worker is searching and without message passing (neither of which are +possible if filtering is on the worker thread) + +SEARCH WORKER: + +Import minisearch + +Build index + +On message from main thread + run search + find the first 200 unique results from each category, and compute their divs for display + note that this is necessary and sufficient information for the main thread to find the + first 200 unique results from any given filter set + post results to main thread + +MAIN: + +Launch worker + +Declare nonconstant globals (worker_is_running, last_search_text, unfiltered_results) + +On text update + if worker is not running, launch_search() + +launch_search + set worker_is_running to true, set last_search_text to the search text + post the search query to worker + +on message from worker + if last_search_text is not the same as the text in the search field, + the latest search result is not reflective of the latest search query, so update again + launch_search() + otherwise + set worker_is_running to false + + regardless, display the new search results to the user + save the unfiltered_results as a global + update_search() + +on filter click + adjust the filter selection + update_search() + +update_search + apply search filters by looping through the unfiltered_results and finding the first 200 + unique results that match the filters + + Update the DOM +*/ + +/////// SEARCH WORKER /////// + +function worker_function(documenterSearchIndex, documenterBaseURL, filters) { + importScripts( + "https://cdn.jsdelivr.net/npm/minisearch@6.1.0/dist/umd/index.min.js" + ); + + let data = documenterSearchIndex.map((x, key) => { + x["id"] = key; // minisearch requires a unique for each object + return x; + }); + + // list below is the lunr 2.1.3 list minus the intersect with names(Base) + // (all, any, get, in, is, only, which) and (do, else, for, let, where, while, with) + // ideally we'd just filter the original list but it's not available as a variable + const stopWords = new Set([ + "a", + "able", + "about", + "across", + "after", + "almost", + "also", + "am", + "among", + "an", + "and", + "are", + "as", + "at", + "be", + "because", + "been", + "but", + "by", + "can", + "cannot", + "could", + "dear", + "did", + "does", + 
"either", + "ever", + "every", + "from", + "got", + "had", + "has", + "have", + "he", + "her", + "hers", + "him", + "his", + "how", + "however", + "i", + "if", + "into", + "it", + "its", + "just", + "least", + "like", + "likely", + "may", + "me", + "might", + "most", + "must", + "my", + "neither", + "no", + "nor", + "not", + "of", + "off", + "often", + "on", + "or", + "other", + "our", + "own", + "rather", + "said", + "say", + "says", + "she", + "should", + "since", + "so", + "some", + "than", + "that", + "the", + "their", + "them", + "then", + "there", + "these", + "they", + "this", + "tis", + "to", + "too", + "twas", + "us", + "wants", + "was", + "we", + "were", + "what", + "when", + "who", + "whom", + "why", + "will", + "would", + "yet", + "you", + "your", + ]); + + let index = new MiniSearch({ + fields: ["title", "text"], // fields to index for full-text search + storeFields: ["location", "title", "text", "category", "page"], // fields to return with results + processTerm: (term) => { + let word = stopWords.has(term) ? null : term; + if (word) { + // custom trimmer that doesn't strip @ and !, which are used in julia macro and function names + word = word + .replace(/^[^a-zA-Z0-9@!]+/, "") + .replace(/[^a-zA-Z0-9@!]+$/, ""); + + word = word.toLowerCase(); + } + + return word ?? null; + }, + // add . as a separator, because otherwise "title": "Documenter.Anchors.add!", would not + // find anything if searching for "add!", only for the entire qualification + tokenize: (string) => string.split(/[\s\-\.]+/), + // options which will be applied during the search + searchOptions: { + prefix: true, + boost: { title: 100 }, + fuzzy: 2, + }, + }); + + index.addAll(data); + + /** + * Used to map characters to HTML entities. + * Refer: https://github.com/lodash/lodash/blob/main/src/escape.ts + */ + const htmlEscapes = { + "&": "&", + "<": "<", + ">": ">", + '"': """, + "'": "'", + }; + + /** + * Used to match HTML entities and HTML characters. + * Refer: https://github.com/lodash/lodash/blob/main/src/escape.ts + */ + const reUnescapedHtml = /[&<>"']/g; + const reHasUnescapedHtml = RegExp(reUnescapedHtml.source); + + /** + * Escape function from lodash + * Refer: https://github.com/lodash/lodash/blob/main/src/escape.ts + */ + function escape(string) { + return string && reHasUnescapedHtml.test(string) + ? string.replace(reUnescapedHtml, (chr) => htmlEscapes[chr]) + : string || ""; + } + + /** + * Make the result component given a minisearch result data object and the value + * of the search input as queryString. To view the result object structure, refer: + * https://lucaong.github.io/minisearch/modules/_minisearch_.html#searchresult + * + * @param {object} result + * @param {string} querystring + * @returns string + */ + function make_search_result(result, querystring) { + let search_divider = `
`; + let display_link = + result.location.slice(Math.max(0), Math.min(50, result.location.length)) + + (result.location.length > 30 ? "..." : ""); // To cut-off the link because it messes with the overflow of the whole div + + if (result.page !== "") { + display_link += ` (${result.page})`; + } + + let textindex = new RegExp(`${querystring}`, "i").exec(result.text); + let text = + textindex !== null + ? result.text.slice( + Math.max(textindex.index - 100, 0), + Math.min( + textindex.index + querystring.length + 100, + result.text.length + ) + ) + : ""; // cut-off text before and after from the match + + text = text.length ? escape(text) : ""; + + let display_result = text.length + ? "..." + + text.replace( + new RegExp(`${escape(querystring)}`, "i"), // For first occurrence + '$&' + ) + + "..." + : ""; // highlights the match + + let in_code = false; + if (!["page", "section"].includes(result.category.toLowerCase())) { + in_code = true; + } + + // We encode the full url to escape some special characters which can lead to broken links + let result_div = ` + +
+
${escape(result.title)}
+
${result.category}
+
+

+ ${display_result} +

+
+ ${display_link} +
+
+ ${search_divider} + `; + + return result_div; + } + + self.onmessage = function (e) { + let query = e.data; + let results = index.search(query, { + filter: (result) => { + // Only return relevant results + return result.score >= 1; + }, + }); + + // Pre-filter to deduplicate and limit to 200 per category to the extent + // possible without knowing what the filters are. + let filtered_results = []; + let counts = {}; + for (let filter of filters) { + counts[filter] = 0; + } + let present = {}; + + for (let result of results) { + cat = result.category; + cnt = counts[cat]; + if (cnt < 200) { + id = cat + "---" + result.location; + if (present[id]) { + continue; + } + present[id] = true; + filtered_results.push({ + location: result.location, + category: cat, + div: make_search_result(result, query), + }); + } + } + + postMessage(filtered_results); + }; +} + +// `worker = Threads.@spawn worker_function(documenterSearchIndex)`, but in JavaScript! +const filters = [ + ...new Set(documenterSearchIndex["docs"].map((x) => x.category)), +]; +const worker_str = + "(" + + worker_function.toString() + + ")(" + + JSON.stringify(documenterSearchIndex["docs"]) + + "," + + JSON.stringify(documenterBaseURL) + + "," + + JSON.stringify(filters) + + ")"; +const worker_blob = new Blob([worker_str], { type: "text/javascript" }); +const worker = new Worker(URL.createObjectURL(worker_blob)); + +/////// SEARCH MAIN /////// + +// Whether the worker is currently handling a search. This is a boolean +// as the worker only ever handles 1 or 0 searches at a time. +var worker_is_running = false; + +// The last search text that was sent to the worker. This is used to determine +// if the worker should be launched again when it reports back results. +var last_search_text = ""; + +// The results of the last search. This, in combination with the state of the filters +// in the DOM, is used compute the results to display on calls to update_search. +var unfiltered_results = []; + +// Which filter is currently selected +var selected_filter = ""; + +$(document).on("input", ".documenter-search-input", function (event) { + if (!worker_is_running) { + launch_search(); + } +}); + +function launch_search() { + worker_is_running = true; + last_search_text = $(".documenter-search-input").val(); + worker.postMessage(last_search_text); +} + +worker.onmessage = function (e) { + if (last_search_text !== $(".documenter-search-input").val()) { + launch_search(); + } else { + worker_is_running = false; + } + + unfiltered_results = e.data; + update_search(); +}; + +$(document).on("click", ".search-filter", function () { + if ($(this).hasClass("search-filter-selected")) { + selected_filter = ""; + } else { + selected_filter = $(this).text().toLowerCase(); + } + + // This updates search results and toggles classes for UI: + update_search(); +}); + +/** + * Make/Update the search component + */ +function update_search() { + let querystring = $(".documenter-search-input").val(); + + if (querystring.trim()) { + if (selected_filter == "") { + results = unfiltered_results; + } else { + results = unfiltered_results.filter((result) => { + return selected_filter == result.category.toLowerCase(); + }); + } + + let search_result_container = ``; + let modal_filters = make_modal_body_filters(); + let search_divider = `
`; + + if (results.length) { + let links = []; + let count = 0; + let search_results = ""; + + for (var i = 0, n = results.length; i < n && count < 200; ++i) { + let result = results[i]; + if (result.location && !links.includes(result.location)) { + search_results += result.div; + count++; + links.push(result.location); + } + } + + if (count == 1) { + count_str = "1 result"; + } else if (count == 200) { + count_str = "200+ results"; + } else { + count_str = count + " results"; + } + let result_count = `
${count_str}
`; + + search_result_container = ` +
+ ${modal_filters} + ${search_divider} + ${result_count} +
+ ${search_results} +
+
+ `; + } else { + search_result_container = ` +
+ ${modal_filters} + ${search_divider} +
0 result(s)
+
+
No result found!
+ `; + } + + if ($(".search-modal-card-body").hasClass("is-justify-content-center")) { + $(".search-modal-card-body").removeClass("is-justify-content-center"); + } + + $(".search-modal-card-body").html(search_result_container); + } else { + if (!$(".search-modal-card-body").hasClass("is-justify-content-center")) { + $(".search-modal-card-body").addClass("is-justify-content-center"); + } + + $(".search-modal-card-body").html(` +
Type something to get started!
+ `); + } +} + +/** + * Make the modal filter html + * + * @returns string + */ +function make_modal_body_filters() { + let str = filters + .map((val) => { + if (selected_filter == val.toLowerCase()) { + return `${val}`; + } else { + return `${val}`; + } + }) + .join(""); + + return ` +
+ Filters: + ${str} +
`; +} + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery'], function($) { + +// Modal settings dialog +$(document).ready(function () { + var settings = $("#documenter-settings"); + $("#documenter-settings-button").click(function () { + settings.toggleClass("is-active"); + }); + // Close the dialog if X is clicked + $("#documenter-settings button.delete").click(function () { + settings.removeClass("is-active"); + }); + // Close dialog if ESC is pressed + $(document).keyup(function (e) { + if (e.keyCode == 27) settings.removeClass("is-active"); + }); +}); + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery'], function($) { + +$(document).ready(function () { + let search_modal_header = ` + + `; + + let initial_search_body = ` +
Type something to get started!
+ `; + + let search_modal_footer = ` + + `; + + $(document.body).append( + ` + + ` + ); + + document.querySelector(".docs-search-query").addEventListener("click", () => { + openModal(); + }); + + document + .querySelector(".close-search-modal") + .addEventListener("click", () => { + closeModal(); + }); + + $(document).on("click", ".search-result-link", function () { + closeModal(); + }); + + document.addEventListener("keydown", (event) => { + if ((event.ctrlKey || event.metaKey) && event.key === "/") { + openModal(); + } else if (event.key === "Escape") { + closeModal(); + } + + return false; + }); + + // Functions to open and close a modal + function openModal() { + let searchModal = document.querySelector("#search-modal"); + + searchModal.classList.add("is-active"); + document.querySelector(".documenter-search-input").focus(); + } + + function closeModal() { + let searchModal = document.querySelector("#search-modal"); + let initial_search_body = ` +
Type something to get started!
+ `; + + searchModal.classList.remove("is-active"); + document.querySelector(".documenter-search-input").blur(); + + if (!$(".search-modal-card-body").hasClass("is-justify-content-center")) { + $(".search-modal-card-body").addClass("is-justify-content-center"); + } + + $(".documenter-search-input").val(""); + $(".search-modal-card-body").html(initial_search_body); + } + + document + .querySelector("#search-modal .modal-background") + .addEventListener("click", () => { + closeModal(); + }); +}); + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery'], function($) { + +// Manages the showing and hiding of the sidebar. +$(document).ready(function () { + var sidebar = $("#documenter > .docs-sidebar"); + var sidebar_button = $("#documenter-sidebar-button"); + sidebar_button.click(function (ev) { + ev.preventDefault(); + sidebar.toggleClass("visible"); + if (sidebar.hasClass("visible")) { + // Makes sure that the current menu item is visible in the sidebar. + $("#documenter .docs-menu a.is-active").focus(); + } + }); + $("#documenter > .docs-main").bind("click", function (ev) { + if ($(ev.target).is(sidebar_button)) { + return; + } + if (sidebar.hasClass("visible")) { + sidebar.removeClass("visible"); + } + }); +}); + +// Resizes the package name / sitename in the sidebar if it is too wide. +// Inspired by: https://github.com/davatron5000/FitText.js +$(document).ready(function () { + e = $("#documenter .docs-autofit"); + function resize() { + var L = parseInt(e.css("max-width"), 10); + var L0 = e.width(); + if (L0 > L) { + var h0 = parseInt(e.css("font-size"), 10); + e.css("font-size", (L * h0) / L0); + // TODO: make sure it survives resizes? + } + } + // call once and then register events + resize(); + $(window).resize(resize); + $(window).on("orientationchange", resize); +}); + +// Scroll the navigation bar to the currently selected menu item +$(document).ready(function () { + var sidebar = $("#documenter .docs-menu").get(0); + var active = $("#documenter .docs-menu .is-active").get(0); + if (typeof active !== "undefined") { + sidebar.scrollTop = active.offsetTop - sidebar.offsetTop - 15; + } +}); + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery'], function($) { + +// Theme picker setup +$(document).ready(function () { + // onchange callback + $("#documenter-themepicker").change(function themepick_callback(ev) { + var themename = $("#documenter-themepicker option:selected").attr("value"); + if (themename === "auto") { + // set_theme(window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light'); + window.localStorage.removeItem("documenter-theme"); + } else { + // set_theme(themename); + window.localStorage.setItem("documenter-theme", themename); + } + // We re-use the global function from themeswap.js to actually do the swapping. 
+ set_theme_from_local_storage(); + }); + + // Make sure that the themepicker displays the correct theme when the theme is retrieved + // from localStorage + if (typeof window.localStorage !== "undefined") { + var theme = window.localStorage.getItem("documenter-theme"); + if (theme !== null) { + $("#documenter-themepicker option").each(function (i, e) { + e.selected = e.value === theme; + }); + } + } +}); + +}) +//////////////////////////////////////////////////////////////////////////////// +require(['jquery'], function($) { + +// update the version selector with info from the siteinfo.js and ../versions.js files +$(document).ready(function () { + // If the version selector is disabled with DOCUMENTER_VERSION_SELECTOR_DISABLED in the + // siteinfo.js file, we just return immediately and not display the version selector. + if ( + typeof DOCUMENTER_VERSION_SELECTOR_DISABLED === "boolean" && + DOCUMENTER_VERSION_SELECTOR_DISABLED + ) { + return; + } + + var version_selector = $("#documenter .docs-version-selector"); + var version_selector_select = $("#documenter .docs-version-selector select"); + + version_selector_select.change(function (x) { + target_href = version_selector_select + .children("option:selected") + .get(0).value; + window.location.href = target_href; + }); + + // add the current version to the selector based on siteinfo.js, but only if the selector is empty + if ( + typeof DOCUMENTER_CURRENT_VERSION !== "undefined" && + $("#version-selector > option").length == 0 + ) { + var option = $( + "" + ); + version_selector_select.append(option); + } + + if (typeof DOC_VERSIONS !== "undefined") { + var existing_versions = version_selector_select.children("option"); + var existing_versions_texts = existing_versions.map(function (i, x) { + return x.text; + }); + DOC_VERSIONS.forEach(function (each) { + var version_url = documenterBaseURL + "/../" + each + "/"; + var existing_id = $.inArray(each, existing_versions_texts); + // if not already in the version selector, add it as a new option, + // otherwise update the old option with the URL and enable it + if (existing_id == -1) { + var option = $( + "" + ); + version_selector_select.append(option); + } else { + var option = existing_versions[existing_id]; + option.value = version_url; + option.disabled = false; + } + }); + } + + // only show the version selector if the selector has been populated + if (version_selector_select.children("option").length > 0) { + version_selector.toggleClass("visible"); + } +}); + +}) diff --git a/v0.20.3/assets/logo.svg b/v0.20.3/assets/logo.svg new file mode 100644 index 000000000..e06ed9600 --- /dev/null +++ b/v0.20.3/assets/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/v0.20.3/assets/themes/documenter-dark.css b/v0.20.3/assets/themes/documenter-dark.css new file mode 100644 index 000000000..53889fb99 --- /dev/null +++ b/v0.20.3/assets/themes/documenter-dark.css @@ -0,0 +1,7 @@ +html.theme--documenter-dark .pagination-previous,html.theme--documenter-dark .pagination-next,html.theme--documenter-dark .pagination-link,html.theme--documenter-dark .pagination-ellipsis,html.theme--documenter-dark .file-cta,html.theme--documenter-dark .file-name,html.theme--documenter-dark .select select,html.theme--documenter-dark .textarea,html.theme--documenter-dark .input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark .button{-moz-appearance:none;-webkit-appearance:none;align-items:center;border:1px solid 
transparent;border-radius:.4em;box-shadow:none;display:inline-flex;font-size:1rem;height:2.5em;justify-content:flex-start;line-height:1.5;padding-bottom:calc(0.5em - 1px);padding-left:calc(0.75em - 1px);padding-right:calc(0.75em - 1px);padding-top:calc(0.5em - 1px);position:relative;vertical-align:top}html.theme--documenter-dark .pagination-previous:focus,html.theme--documenter-dark .pagination-next:focus,html.theme--documenter-dark .pagination-link:focus,html.theme--documenter-dark .pagination-ellipsis:focus,html.theme--documenter-dark .file-cta:focus,html.theme--documenter-dark .file-name:focus,html.theme--documenter-dark .select select:focus,html.theme--documenter-dark .textarea:focus,html.theme--documenter-dark .input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:focus,html.theme--documenter-dark .button:focus,html.theme--documenter-dark .is-focused.pagination-previous,html.theme--documenter-dark .is-focused.pagination-next,html.theme--documenter-dark .is-focused.pagination-link,html.theme--documenter-dark .is-focused.pagination-ellipsis,html.theme--documenter-dark .is-focused.file-cta,html.theme--documenter-dark .is-focused.file-name,html.theme--documenter-dark .select select.is-focused,html.theme--documenter-dark .is-focused.textarea,html.theme--documenter-dark .is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .is-focused.button,html.theme--documenter-dark .pagination-previous:active,html.theme--documenter-dark .pagination-next:active,html.theme--documenter-dark .pagination-link:active,html.theme--documenter-dark .pagination-ellipsis:active,html.theme--documenter-dark .file-cta:active,html.theme--documenter-dark .file-name:active,html.theme--documenter-dark .select select:active,html.theme--documenter-dark .textarea:active,html.theme--documenter-dark .input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:active,html.theme--documenter-dark .button:active,html.theme--documenter-dark .is-active.pagination-previous,html.theme--documenter-dark .is-active.pagination-next,html.theme--documenter-dark .is-active.pagination-link,html.theme--documenter-dark .is-active.pagination-ellipsis,html.theme--documenter-dark .is-active.file-cta,html.theme--documenter-dark .is-active.file-name,html.theme--documenter-dark .select select.is-active,html.theme--documenter-dark .is-active.textarea,html.theme--documenter-dark .is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active,html.theme--documenter-dark .is-active.button{outline:none}html.theme--documenter-dark .pagination-previous[disabled],html.theme--documenter-dark .pagination-next[disabled],html.theme--documenter-dark .pagination-link[disabled],html.theme--documenter-dark .pagination-ellipsis[disabled],html.theme--documenter-dark .file-cta[disabled],html.theme--documenter-dark .file-name[disabled],html.theme--documenter-dark .select select[disabled],html.theme--documenter-dark .textarea[disabled],html.theme--documenter-dark .input[disabled],html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input[disabled],html.theme--documenter-dark .button[disabled],fieldset[disabled] html.theme--documenter-dark .pagination-previous,html.theme--documenter-dark fieldset[disabled] .pagination-previous,fieldset[disabled] html.theme--documenter-dark .pagination-next,html.theme--documenter-dark fieldset[disabled] .pagination-next,fieldset[disabled] 
html.theme--documenter-dark .pagination-link,html.theme--documenter-dark fieldset[disabled] .pagination-link,fieldset[disabled] html.theme--documenter-dark .pagination-ellipsis,html.theme--documenter-dark fieldset[disabled] .pagination-ellipsis,fieldset[disabled] html.theme--documenter-dark .file-cta,html.theme--documenter-dark fieldset[disabled] .file-cta,fieldset[disabled] html.theme--documenter-dark .file-name,html.theme--documenter-dark fieldset[disabled] .file-name,fieldset[disabled] html.theme--documenter-dark .select select,fieldset[disabled] html.theme--documenter-dark .textarea,fieldset[disabled] html.theme--documenter-dark .input,fieldset[disabled] html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark fieldset[disabled] .select select,html.theme--documenter-dark .select fieldset[disabled] select,html.theme--documenter-dark fieldset[disabled] .textarea,html.theme--documenter-dark fieldset[disabled] .input,html.theme--documenter-dark fieldset[disabled] #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark #documenter .docs-sidebar fieldset[disabled] form.docs-search>input,fieldset[disabled] html.theme--documenter-dark .button,html.theme--documenter-dark fieldset[disabled] .button{cursor:not-allowed}html.theme--documenter-dark .tabs,html.theme--documenter-dark .pagination-previous,html.theme--documenter-dark .pagination-next,html.theme--documenter-dark .pagination-link,html.theme--documenter-dark .pagination-ellipsis,html.theme--documenter-dark .breadcrumb,html.theme--documenter-dark .file,html.theme--documenter-dark .button,.is-unselectable{-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}html.theme--documenter-dark .navbar-link:not(.is-arrowless)::after,html.theme--documenter-dark .select:not(.is-multiple):not(.is-loading)::after{border:3px solid rgba(0,0,0,0);border-radius:2px;border-right:0;border-top:0;content:" ";display:block;height:0.625em;margin-top:-0.4375em;pointer-events:none;position:absolute;top:50%;transform:rotate(-45deg);transform-origin:center;width:0.625em}html.theme--documenter-dark .admonition:not(:last-child),html.theme--documenter-dark .tabs:not(:last-child),html.theme--documenter-dark .pagination:not(:last-child),html.theme--documenter-dark .message:not(:last-child),html.theme--documenter-dark .level:not(:last-child),html.theme--documenter-dark .breadcrumb:not(:last-child),html.theme--documenter-dark .block:not(:last-child),html.theme--documenter-dark .title:not(:last-child),html.theme--documenter-dark .subtitle:not(:last-child),html.theme--documenter-dark .table-container:not(:last-child),html.theme--documenter-dark .table:not(:last-child),html.theme--documenter-dark .progress:not(:last-child),html.theme--documenter-dark .notification:not(:last-child),html.theme--documenter-dark .content:not(:last-child),html.theme--documenter-dark .box:not(:last-child){margin-bottom:1.5rem}html.theme--documenter-dark .modal-close,html.theme--documenter-dark .delete{-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-moz-appearance:none;-webkit-appearance:none;background-color:rgba(10,10,10,0.2);border:none;border-radius:9999px;cursor:pointer;pointer-events:auto;display:inline-block;flex-grow:0;flex-shrink:0;font-size:0;height:20px;max-height:20px;max-width:20px;min-height:20px;min-width:20px;outline:none;position:relative;vertical-align:top;width:20px}html.theme--documenter-dark 
.modal-close::before,html.theme--documenter-dark .delete::before,html.theme--documenter-dark .modal-close::after,html.theme--documenter-dark .delete::after{background-color:#fff;content:"";display:block;left:50%;position:absolute;top:50%;transform:translateX(-50%) translateY(-50%) rotate(45deg);transform-origin:center center}html.theme--documenter-dark .modal-close::before,html.theme--documenter-dark .delete::before{height:2px;width:50%}html.theme--documenter-dark .modal-close::after,html.theme--documenter-dark .delete::after{height:50%;width:2px}html.theme--documenter-dark .modal-close:hover,html.theme--documenter-dark .delete:hover,html.theme--documenter-dark .modal-close:focus,html.theme--documenter-dark .delete:focus{background-color:rgba(10,10,10,0.3)}html.theme--documenter-dark .modal-close:active,html.theme--documenter-dark .delete:active{background-color:rgba(10,10,10,0.4)}html.theme--documenter-dark .is-small.modal-close,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.modal-close,html.theme--documenter-dark .is-small.delete,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.delete{height:16px;max-height:16px;max-width:16px;min-height:16px;min-width:16px;width:16px}html.theme--documenter-dark .is-medium.modal-close,html.theme--documenter-dark .is-medium.delete{height:24px;max-height:24px;max-width:24px;min-height:24px;min-width:24px;width:24px}html.theme--documenter-dark .is-large.modal-close,html.theme--documenter-dark .is-large.delete{height:32px;max-height:32px;max-width:32px;min-height:32px;min-width:32px;width:32px}html.theme--documenter-dark .control.is-loading::after,html.theme--documenter-dark .select.is-loading::after,html.theme--documenter-dark .loader,html.theme--documenter-dark .button.is-loading::after{animation:spinAround 500ms infinite linear;border:2px solid #dbdee0;border-radius:9999px;border-right-color:transparent;border-top-color:transparent;content:"";display:block;height:1em;position:relative;width:1em}html.theme--documenter-dark .hero-video,html.theme--documenter-dark .modal-background,html.theme--documenter-dark .modal,html.theme--documenter-dark .image.is-square img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-square img,html.theme--documenter-dark .image.is-square .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-square .has-ratio,html.theme--documenter-dark .image.is-1by1 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by1 img,html.theme--documenter-dark .image.is-1by1 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by1 .has-ratio,html.theme--documenter-dark .image.is-5by4 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by4 img,html.theme--documenter-dark .image.is-5by4 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by4 .has-ratio,html.theme--documenter-dark .image.is-4by3 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by3 img,html.theme--documenter-dark .image.is-4by3 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by3 .has-ratio,html.theme--documenter-dark .image.is-3by2 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by2 img,html.theme--documenter-dark .image.is-3by2 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by2 .has-ratio,html.theme--documenter-dark .image.is-5by3 
img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by3 img,html.theme--documenter-dark .image.is-5by3 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by3 .has-ratio,html.theme--documenter-dark .image.is-16by9 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-16by9 img,html.theme--documenter-dark .image.is-16by9 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-16by9 .has-ratio,html.theme--documenter-dark .image.is-2by1 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by1 img,html.theme--documenter-dark .image.is-2by1 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by1 .has-ratio,html.theme--documenter-dark .image.is-3by1 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by1 img,html.theme--documenter-dark .image.is-3by1 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by1 .has-ratio,html.theme--documenter-dark .image.is-4by5 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by5 img,html.theme--documenter-dark .image.is-4by5 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by5 .has-ratio,html.theme--documenter-dark .image.is-3by4 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by4 img,html.theme--documenter-dark .image.is-3by4 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by4 .has-ratio,html.theme--documenter-dark .image.is-2by3 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by3 img,html.theme--documenter-dark .image.is-2by3 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by3 .has-ratio,html.theme--documenter-dark .image.is-3by5 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by5 img,html.theme--documenter-dark .image.is-3by5 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by5 .has-ratio,html.theme--documenter-dark .image.is-9by16 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-9by16 img,html.theme--documenter-dark .image.is-9by16 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-9by16 .has-ratio,html.theme--documenter-dark .image.is-1by2 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by2 img,html.theme--documenter-dark .image.is-1by2 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by2 .has-ratio,html.theme--documenter-dark .image.is-1by3 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by3 img,html.theme--documenter-dark .image.is-1by3 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by3 .has-ratio,.is-overlay{bottom:0;left:0;position:absolute;right:0;top:0}html.theme--documenter-dark .navbar-burger{-moz-appearance:none;-webkit-appearance:none;appearance:none;background:none;border:none;color:currentColor;font-family:inherit;font-size:1em;margin:0;padding:0}/*! 
minireset.css v0.0.6 | MIT License | github.com/jgthms/minireset.css */html,body,p,ol,ul,li,dl,dt,dd,blockquote,figure,fieldset,legend,textarea,pre,iframe,hr,h1,h2,h3,h4,h5,h6{margin:0;padding:0}h1,h2,h3,h4,h5,h6{font-size:100%;font-weight:normal}ul{list-style:none}button,input,select,textarea{margin:0}html{box-sizing:border-box}*,*::before,*::after{box-sizing:inherit}img,video{height:auto;max-width:100%}iframe{border:0}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}td:not([align]),th:not([align]){text-align:inherit}.has-text-white{color:#fff !important}a.has-text-white:hover,a.has-text-white:focus{color:#e6e6e6 !important}.has-background-white{background-color:#fff !important}.has-text-black{color:#0a0a0a !important}a.has-text-black:hover,a.has-text-black:focus{color:#000 !important}.has-background-black{background-color:#0a0a0a !important}.has-text-light{color:#ecf0f1 !important}a.has-text-light:hover,a.has-text-light:focus{color:#cfd9db !important}.has-background-light{background-color:#ecf0f1 !important}.has-text-dark{color:#282f2f !important}a.has-text-dark:hover,a.has-text-dark:focus{color:#111414 !important}.has-background-dark{background-color:#282f2f !important}.has-text-primary{color:#375a7f !important}a.has-text-primary:hover,a.has-text-primary:focus{color:#28415b !important}.has-background-primary{background-color:#375a7f !important}.has-text-primary-light{color:#f1f5f9 !important}a.has-text-primary-light:hover,a.has-text-primary-light:focus{color:#cddbe9 !important}.has-background-primary-light{background-color:#f1f5f9 !important}.has-text-primary-dark{color:#4d7eb2 !important}a.has-text-primary-dark:hover,a.has-text-primary-dark:focus{color:#7198c1 !important}.has-background-primary-dark{background-color:#4d7eb2 !important}.has-text-link{color:#1abc9c !important}a.has-text-link:hover,a.has-text-link:focus{color:#148f77 !important}.has-background-link{background-color:#1abc9c !important}.has-text-link-light{color:#edfdf9 !important}a.has-text-link-light:hover,a.has-text-link-light:focus{color:#c0f6ec !important}.has-background-link-light{background-color:#edfdf9 !important}.has-text-link-dark{color:#15987e !important}a.has-text-link-dark:hover,a.has-text-link-dark:focus{color:#1bc5a4 !important}.has-background-link-dark{background-color:#15987e !important}.has-text-info{color:#024c7d !important}a.has-text-info:hover,a.has-text-info:focus{color:#012d4b !important}.has-background-info{background-color:#024c7d !important}.has-text-info-light{color:#ebf7ff !important}a.has-text-info-light:hover,a.has-text-info-light:focus{color:#b9e2fe !important}.has-background-info-light{background-color:#ebf7ff !important}.has-text-info-dark{color:#0e9dfb !important}a.has-text-info-dark:hover,a.has-text-info-dark:focus{color:#40b1fc !important}.has-background-info-dark{background-color:#0e9dfb !important}.has-text-success{color:#008438 !important}a.has-text-success:hover,a.has-text-success:focus{color:#005122 !important}.has-background-success{background-color:#008438 !important}.has-text-success-light{color:#ebfff3 !important}a.has-text-success-light:hover,a.has-text-success-light:focus{color:#b8ffd6 !important}.has-background-success-light{background-color:#ebfff3 !important}.has-text-success-dark{color:#00eb64 !important}a.has-text-success-dark:hover,a.has-text-success-dark:focus{color:#1fff7e !important}.has-background-success-dark{background-color:#00eb64 !important}.has-text-warning{color:#ad8100 !important}a.has-text-warning:hover,a.has-text-warning:focus{color:#7a5b00 
!important}.has-background-warning{background-color:#ad8100 !important}.has-text-warning-light{color:#fffaeb !important}a.has-text-warning-light:hover,a.has-text-warning-light:focus{color:#ffedb8 !important}.has-background-warning-light{background-color:#fffaeb !important}.has-text-warning-dark{color:#d19c00 !important}a.has-text-warning-dark:hover,a.has-text-warning-dark:focus{color:#ffbf05 !important}.has-background-warning-dark{background-color:#d19c00 !important}.has-text-danger{color:#9e1b0d !important}a.has-text-danger:hover,a.has-text-danger:focus{color:#6f1309 !important}.has-background-danger{background-color:#9e1b0d !important}.has-text-danger-light{color:#fdeeec !important}a.has-text-danger-light:hover,a.has-text-danger-light:focus{color:#fac3bd !important}.has-background-danger-light{background-color:#fdeeec !important}.has-text-danger-dark{color:#ec311d !important}a.has-text-danger-dark:hover,a.has-text-danger-dark:focus{color:#f05c4c !important}.has-background-danger-dark{background-color:#ec311d !important}.has-text-black-bis{color:#121212 !important}.has-background-black-bis{background-color:#121212 !important}.has-text-black-ter{color:#242424 !important}.has-background-black-ter{background-color:#242424 !important}.has-text-grey-darker{color:#282f2f !important}.has-background-grey-darker{background-color:#282f2f !important}.has-text-grey-dark{color:#343c3d !important}.has-background-grey-dark{background-color:#343c3d !important}.has-text-grey{color:#5e6d6f !important}.has-background-grey{background-color:#5e6d6f !important}.has-text-grey-light{color:#8c9b9d !important}.has-background-grey-light{background-color:#8c9b9d !important}.has-text-grey-lighter{color:#dbdee0 !important}.has-background-grey-lighter{background-color:#dbdee0 !important}.has-text-white-ter{color:#ecf0f1 !important}.has-background-white-ter{background-color:#ecf0f1 !important}.has-text-white-bis{color:#fafafa !important}.has-background-white-bis{background-color:#fafafa !important}.is-flex-direction-row{flex-direction:row !important}.is-flex-direction-row-reverse{flex-direction:row-reverse !important}.is-flex-direction-column{flex-direction:column !important}.is-flex-direction-column-reverse{flex-direction:column-reverse !important}.is-flex-wrap-nowrap{flex-wrap:nowrap !important}.is-flex-wrap-wrap{flex-wrap:wrap !important}.is-flex-wrap-wrap-reverse{flex-wrap:wrap-reverse !important}.is-justify-content-flex-start{justify-content:flex-start !important}.is-justify-content-flex-end{justify-content:flex-end !important}.is-justify-content-center{justify-content:center !important}.is-justify-content-space-between{justify-content:space-between !important}.is-justify-content-space-around{justify-content:space-around !important}.is-justify-content-space-evenly{justify-content:space-evenly !important}.is-justify-content-start{justify-content:start !important}.is-justify-content-end{justify-content:end !important}.is-justify-content-left{justify-content:left !important}.is-justify-content-right{justify-content:right !important}.is-align-content-flex-start{align-content:flex-start !important}.is-align-content-flex-end{align-content:flex-end !important}.is-align-content-center{align-content:center !important}.is-align-content-space-between{align-content:space-between !important}.is-align-content-space-around{align-content:space-around !important}.is-align-content-space-evenly{align-content:space-evenly !important}.is-align-content-stretch{align-content:stretch !important}.is-align-content-start{align-content:start 
!important}.is-align-content-end{align-content:end !important}.is-align-content-baseline{align-content:baseline !important}.is-align-items-stretch{align-items:stretch !important}.is-align-items-flex-start{align-items:flex-start !important}.is-align-items-flex-end{align-items:flex-end !important}.is-align-items-center{align-items:center !important}.is-align-items-baseline{align-items:baseline !important}.is-align-items-start{align-items:start !important}.is-align-items-end{align-items:end !important}.is-align-items-self-start{align-items:self-start !important}.is-align-items-self-end{align-items:self-end !important}.is-align-self-auto{align-self:auto !important}.is-align-self-flex-start{align-self:flex-start !important}.is-align-self-flex-end{align-self:flex-end !important}.is-align-self-center{align-self:center !important}.is-align-self-baseline{align-self:baseline !important}.is-align-self-stretch{align-self:stretch !important}.is-flex-grow-0{flex-grow:0 !important}.is-flex-grow-1{flex-grow:1 !important}.is-flex-grow-2{flex-grow:2 !important}.is-flex-grow-3{flex-grow:3 !important}.is-flex-grow-4{flex-grow:4 !important}.is-flex-grow-5{flex-grow:5 !important}.is-flex-shrink-0{flex-shrink:0 !important}.is-flex-shrink-1{flex-shrink:1 !important}.is-flex-shrink-2{flex-shrink:2 !important}.is-flex-shrink-3{flex-shrink:3 !important}.is-flex-shrink-4{flex-shrink:4 !important}.is-flex-shrink-5{flex-shrink:5 !important}.is-clearfix::after{clear:both;content:" ";display:table}.is-pulled-left{float:left !important}.is-pulled-right{float:right !important}.is-radiusless{border-radius:0 !important}.is-shadowless{box-shadow:none !important}.is-clickable{cursor:pointer !important;pointer-events:all !important}.is-clipped{overflow:hidden !important}.is-relative{position:relative !important}.is-marginless{margin:0 !important}.is-paddingless{padding:0 !important}.m-0{margin:0 !important}.mt-0{margin-top:0 !important}.mr-0{margin-right:0 !important}.mb-0{margin-bottom:0 !important}.ml-0{margin-left:0 !important}.mx-0{margin-left:0 !important;margin-right:0 !important}.my-0{margin-top:0 !important;margin-bottom:0 !important}.m-1{margin:.25rem !important}.mt-1{margin-top:.25rem !important}.mr-1{margin-right:.25rem !important}.mb-1{margin-bottom:.25rem !important}.ml-1{margin-left:.25rem !important}.mx-1{margin-left:.25rem !important;margin-right:.25rem !important}.my-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.m-2{margin:.5rem !important}.mt-2{margin-top:.5rem !important}.mr-2{margin-right:.5rem !important}.mb-2{margin-bottom:.5rem !important}.ml-2{margin-left:.5rem !important}.mx-2{margin-left:.5rem !important;margin-right:.5rem !important}.my-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.m-3{margin:.75rem !important}.mt-3{margin-top:.75rem !important}.mr-3{margin-right:.75rem !important}.mb-3{margin-bottom:.75rem !important}.ml-3{margin-left:.75rem !important}.mx-3{margin-left:.75rem !important;margin-right:.75rem !important}.my-3{margin-top:.75rem !important;margin-bottom:.75rem !important}.m-4{margin:1rem !important}.mt-4{margin-top:1rem !important}.mr-4{margin-right:1rem !important}.mb-4{margin-bottom:1rem !important}.ml-4{margin-left:1rem !important}.mx-4{margin-left:1rem !important;margin-right:1rem !important}.my-4{margin-top:1rem !important;margin-bottom:1rem !important}.m-5{margin:1.5rem !important}.mt-5{margin-top:1.5rem !important}.mr-5{margin-right:1.5rem !important}.mb-5{margin-bottom:1.5rem !important}.ml-5{margin-left:1.5rem !important}.mx-5{margin-left:1.5rem 
!important;margin-right:1.5rem !important}.my-5{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.m-6{margin:3rem !important}.mt-6{margin-top:3rem !important}.mr-6{margin-right:3rem !important}.mb-6{margin-bottom:3rem !important}.ml-6{margin-left:3rem !important}.mx-6{margin-left:3rem !important;margin-right:3rem !important}.my-6{margin-top:3rem !important;margin-bottom:3rem !important}.m-auto{margin:auto !important}.mt-auto{margin-top:auto !important}.mr-auto{margin-right:auto !important}.mb-auto{margin-bottom:auto !important}.ml-auto{margin-left:auto !important}.mx-auto{margin-left:auto !important;margin-right:auto !important}.my-auto{margin-top:auto !important;margin-bottom:auto !important}.p-0{padding:0 !important}.pt-0{padding-top:0 !important}.pr-0{padding-right:0 !important}.pb-0{padding-bottom:0 !important}.pl-0{padding-left:0 !important}.px-0{padding-left:0 !important;padding-right:0 !important}.py-0{padding-top:0 !important;padding-bottom:0 !important}.p-1{padding:.25rem !important}.pt-1{padding-top:.25rem !important}.pr-1{padding-right:.25rem !important}.pb-1{padding-bottom:.25rem !important}.pl-1{padding-left:.25rem !important}.px-1{padding-left:.25rem !important;padding-right:.25rem !important}.py-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.p-2{padding:.5rem !important}.pt-2{padding-top:.5rem !important}.pr-2{padding-right:.5rem !important}.pb-2{padding-bottom:.5rem !important}.pl-2{padding-left:.5rem !important}.px-2{padding-left:.5rem !important;padding-right:.5rem !important}.py-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.p-3{padding:.75rem !important}.pt-3{padding-top:.75rem !important}.pr-3{padding-right:.75rem !important}.pb-3{padding-bottom:.75rem !important}.pl-3{padding-left:.75rem !important}.px-3{padding-left:.75rem !important;padding-right:.75rem !important}.py-3{padding-top:.75rem !important;padding-bottom:.75rem !important}.p-4{padding:1rem !important}.pt-4{padding-top:1rem !important}.pr-4{padding-right:1rem !important}.pb-4{padding-bottom:1rem !important}.pl-4{padding-left:1rem !important}.px-4{padding-left:1rem !important;padding-right:1rem !important}.py-4{padding-top:1rem !important;padding-bottom:1rem !important}.p-5{padding:1.5rem !important}.pt-5{padding-top:1.5rem !important}.pr-5{padding-right:1.5rem !important}.pb-5{padding-bottom:1.5rem !important}.pl-5{padding-left:1.5rem !important}.px-5{padding-left:1.5rem !important;padding-right:1.5rem !important}.py-5{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.p-6{padding:3rem !important}.pt-6{padding-top:3rem !important}.pr-6{padding-right:3rem !important}.pb-6{padding-bottom:3rem !important}.pl-6{padding-left:3rem !important}.px-6{padding-left:3rem !important;padding-right:3rem !important}.py-6{padding-top:3rem !important;padding-bottom:3rem !important}.p-auto{padding:auto !important}.pt-auto{padding-top:auto !important}.pr-auto{padding-right:auto !important}.pb-auto{padding-bottom:auto !important}.pl-auto{padding-left:auto !important}.px-auto{padding-left:auto !important;padding-right:auto !important}.py-auto{padding-top:auto !important;padding-bottom:auto !important}.is-size-1{font-size:3rem !important}.is-size-2{font-size:2.5rem !important}.is-size-3{font-size:2rem !important}.is-size-4{font-size:1.5rem !important}.is-size-5{font-size:1.25rem !important}.is-size-6{font-size:1rem !important}.is-size-7,html.theme--documenter-dark .docstring>section>a.docs-sourcelink{font-size:.75rem !important}@media screen and (max-width: 
768px){.is-size-1-mobile{font-size:3rem !important}.is-size-2-mobile{font-size:2.5rem !important}.is-size-3-mobile{font-size:2rem !important}.is-size-4-mobile{font-size:1.5rem !important}.is-size-5-mobile{font-size:1.25rem !important}.is-size-6-mobile{font-size:1rem !important}.is-size-7-mobile{font-size:.75rem !important}}@media screen and (min-width: 769px),print{.is-size-1-tablet{font-size:3rem !important}.is-size-2-tablet{font-size:2.5rem !important}.is-size-3-tablet{font-size:2rem !important}.is-size-4-tablet{font-size:1.5rem !important}.is-size-5-tablet{font-size:1.25rem !important}.is-size-6-tablet{font-size:1rem !important}.is-size-7-tablet{font-size:.75rem !important}}@media screen and (max-width: 1055px){.is-size-1-touch{font-size:3rem !important}.is-size-2-touch{font-size:2.5rem !important}.is-size-3-touch{font-size:2rem !important}.is-size-4-touch{font-size:1.5rem !important}.is-size-5-touch{font-size:1.25rem !important}.is-size-6-touch{font-size:1rem !important}.is-size-7-touch{font-size:.75rem !important}}@media screen and (min-width: 1056px){.is-size-1-desktop{font-size:3rem !important}.is-size-2-desktop{font-size:2.5rem !important}.is-size-3-desktop{font-size:2rem !important}.is-size-4-desktop{font-size:1.5rem !important}.is-size-5-desktop{font-size:1.25rem !important}.is-size-6-desktop{font-size:1rem !important}.is-size-7-desktop{font-size:.75rem !important}}@media screen and (min-width: 1216px){.is-size-1-widescreen{font-size:3rem !important}.is-size-2-widescreen{font-size:2.5rem !important}.is-size-3-widescreen{font-size:2rem !important}.is-size-4-widescreen{font-size:1.5rem !important}.is-size-5-widescreen{font-size:1.25rem !important}.is-size-6-widescreen{font-size:1rem !important}.is-size-7-widescreen{font-size:.75rem !important}}@media screen and (min-width: 1408px){.is-size-1-fullhd{font-size:3rem !important}.is-size-2-fullhd{font-size:2.5rem !important}.is-size-3-fullhd{font-size:2rem !important}.is-size-4-fullhd{font-size:1.5rem !important}.is-size-5-fullhd{font-size:1.25rem !important}.is-size-6-fullhd{font-size:1rem !important}.is-size-7-fullhd{font-size:.75rem !important}}.has-text-centered{text-align:center !important}.has-text-justified{text-align:justify !important}.has-text-left{text-align:left !important}.has-text-right{text-align:right !important}@media screen and (max-width: 768px){.has-text-centered-mobile{text-align:center !important}}@media screen and (min-width: 769px),print{.has-text-centered-tablet{text-align:center !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.has-text-centered-tablet-only{text-align:center !important}}@media screen and (max-width: 1055px){.has-text-centered-touch{text-align:center !important}}@media screen and (min-width: 1056px){.has-text-centered-desktop{text-align:center !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.has-text-centered-desktop-only{text-align:center !important}}@media screen and (min-width: 1216px){.has-text-centered-widescreen{text-align:center !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.has-text-centered-widescreen-only{text-align:center !important}}@media screen and (min-width: 1408px){.has-text-centered-fullhd{text-align:center !important}}@media screen and (max-width: 768px){.has-text-justified-mobile{text-align:justify !important}}@media screen and (min-width: 769px),print{.has-text-justified-tablet{text-align:justify !important}}@media screen and (min-width: 769px) and (max-width: 
1055px){.has-text-justified-tablet-only{text-align:justify !important}}@media screen and (max-width: 1055px){.has-text-justified-touch{text-align:justify !important}}@media screen and (min-width: 1056px){.has-text-justified-desktop{text-align:justify !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.has-text-justified-desktop-only{text-align:justify !important}}@media screen and (min-width: 1216px){.has-text-justified-widescreen{text-align:justify !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.has-text-justified-widescreen-only{text-align:justify !important}}@media screen and (min-width: 1408px){.has-text-justified-fullhd{text-align:justify !important}}@media screen and (max-width: 768px){.has-text-left-mobile{text-align:left !important}}@media screen and (min-width: 769px),print{.has-text-left-tablet{text-align:left !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.has-text-left-tablet-only{text-align:left !important}}@media screen and (max-width: 1055px){.has-text-left-touch{text-align:left !important}}@media screen and (min-width: 1056px){.has-text-left-desktop{text-align:left !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.has-text-left-desktop-only{text-align:left !important}}@media screen and (min-width: 1216px){.has-text-left-widescreen{text-align:left !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.has-text-left-widescreen-only{text-align:left !important}}@media screen and (min-width: 1408px){.has-text-left-fullhd{text-align:left !important}}@media screen and (max-width: 768px){.has-text-right-mobile{text-align:right !important}}@media screen and (min-width: 769px),print{.has-text-right-tablet{text-align:right !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.has-text-right-tablet-only{text-align:right !important}}@media screen and (max-width: 1055px){.has-text-right-touch{text-align:right !important}}@media screen and (min-width: 1056px){.has-text-right-desktop{text-align:right !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.has-text-right-desktop-only{text-align:right !important}}@media screen and (min-width: 1216px){.has-text-right-widescreen{text-align:right !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.has-text-right-widescreen-only{text-align:right !important}}@media screen and (min-width: 1408px){.has-text-right-fullhd{text-align:right !important}}.is-capitalized{text-transform:capitalize !important}.is-lowercase{text-transform:lowercase !important}.is-uppercase{text-transform:uppercase !important}.is-italic{font-style:italic !important}.is-underlined{text-decoration:underline !important}.has-text-weight-light{font-weight:300 !important}.has-text-weight-normal{font-weight:400 !important}.has-text-weight-medium{font-weight:500 !important}.has-text-weight-semibold{font-weight:600 !important}.has-text-weight-bold{font-weight:700 !important}.is-family-primary{font-family:"Lato Medium",-apple-system,BlinkMacSystemFont,"Segoe UI","Helvetica Neue","Helvetica","Arial",sans-serif !important}.is-family-secondary{font-family:"Lato Medium",-apple-system,BlinkMacSystemFont,"Segoe UI","Helvetica Neue","Helvetica","Arial",sans-serif !important}.is-family-sans-serif{font-family:"Lato Medium",-apple-system,BlinkMacSystemFont,"Segoe UI","Helvetica Neue","Helvetica","Arial",sans-serif !important}.is-family-monospace{font-family:"JuliaMono","SFMono-Regular","Menlo","Consolas","Liberation 
Mono","DejaVu Sans Mono",monospace !important}.is-family-code{font-family:"JuliaMono","SFMono-Regular","Menlo","Consolas","Liberation Mono","DejaVu Sans Mono",monospace !important}.is-block{display:block !important}@media screen and (max-width: 768px){.is-block-mobile{display:block !important}}@media screen and (min-width: 769px),print{.is-block-tablet{display:block !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-block-tablet-only{display:block !important}}@media screen and (max-width: 1055px){.is-block-touch{display:block !important}}@media screen and (min-width: 1056px){.is-block-desktop{display:block !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-block-desktop-only{display:block !important}}@media screen and (min-width: 1216px){.is-block-widescreen{display:block !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-block-widescreen-only{display:block !important}}@media screen and (min-width: 1408px){.is-block-fullhd{display:block !important}}.is-flex{display:flex !important}@media screen and (max-width: 768px){.is-flex-mobile{display:flex !important}}@media screen and (min-width: 769px),print{.is-flex-tablet{display:flex !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-flex-tablet-only{display:flex !important}}@media screen and (max-width: 1055px){.is-flex-touch{display:flex !important}}@media screen and (min-width: 1056px){.is-flex-desktop{display:flex !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-flex-desktop-only{display:flex !important}}@media screen and (min-width: 1216px){.is-flex-widescreen{display:flex !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-flex-widescreen-only{display:flex !important}}@media screen and (min-width: 1408px){.is-flex-fullhd{display:flex !important}}.is-inline{display:inline !important}@media screen and (max-width: 768px){.is-inline-mobile{display:inline !important}}@media screen and (min-width: 769px),print{.is-inline-tablet{display:inline !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-inline-tablet-only{display:inline !important}}@media screen and (max-width: 1055px){.is-inline-touch{display:inline !important}}@media screen and (min-width: 1056px){.is-inline-desktop{display:inline !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-inline-desktop-only{display:inline !important}}@media screen and (min-width: 1216px){.is-inline-widescreen{display:inline !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-inline-widescreen-only{display:inline !important}}@media screen and (min-width: 1408px){.is-inline-fullhd{display:inline !important}}.is-inline-block{display:inline-block !important}@media screen and (max-width: 768px){.is-inline-block-mobile{display:inline-block !important}}@media screen and (min-width: 769px),print{.is-inline-block-tablet{display:inline-block !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-inline-block-tablet-only{display:inline-block !important}}@media screen and (max-width: 1055px){.is-inline-block-touch{display:inline-block !important}}@media screen and (min-width: 1056px){.is-inline-block-desktop{display:inline-block !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-inline-block-desktop-only{display:inline-block !important}}@media screen and (min-width: 1216px){.is-inline-block-widescreen{display:inline-block 
!important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-inline-block-widescreen-only{display:inline-block !important}}@media screen and (min-width: 1408px){.is-inline-block-fullhd{display:inline-block !important}}.is-inline-flex{display:inline-flex !important}@media screen and (max-width: 768px){.is-inline-flex-mobile{display:inline-flex !important}}@media screen and (min-width: 769px),print{.is-inline-flex-tablet{display:inline-flex !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-inline-flex-tablet-only{display:inline-flex !important}}@media screen and (max-width: 1055px){.is-inline-flex-touch{display:inline-flex !important}}@media screen and (min-width: 1056px){.is-inline-flex-desktop{display:inline-flex !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-inline-flex-desktop-only{display:inline-flex !important}}@media screen and (min-width: 1216px){.is-inline-flex-widescreen{display:inline-flex !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-inline-flex-widescreen-only{display:inline-flex !important}}@media screen and (min-width: 1408px){.is-inline-flex-fullhd{display:inline-flex !important}}.is-hidden{display:none !important}.is-sr-only{border:none !important;clip:rect(0, 0, 0, 0) !important;height:0.01em !important;overflow:hidden !important;padding:0 !important;position:absolute !important;white-space:nowrap !important;width:0.01em !important}@media screen and (max-width: 768px){.is-hidden-mobile{display:none !important}}@media screen and (min-width: 769px),print{.is-hidden-tablet{display:none !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-hidden-tablet-only{display:none !important}}@media screen and (max-width: 1055px){.is-hidden-touch{display:none !important}}@media screen and (min-width: 1056px){.is-hidden-desktop{display:none !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-hidden-desktop-only{display:none !important}}@media screen and (min-width: 1216px){.is-hidden-widescreen{display:none !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-hidden-widescreen-only{display:none !important}}@media screen and (min-width: 1408px){.is-hidden-fullhd{display:none !important}}.is-invisible{visibility:hidden !important}@media screen and (max-width: 768px){.is-invisible-mobile{visibility:hidden !important}}@media screen and (min-width: 769px),print{.is-invisible-tablet{visibility:hidden !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-invisible-tablet-only{visibility:hidden !important}}@media screen and (max-width: 1055px){.is-invisible-touch{visibility:hidden !important}}@media screen and (min-width: 1056px){.is-invisible-desktop{visibility:hidden !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-invisible-desktop-only{visibility:hidden !important}}@media screen and (min-width: 1216px){.is-invisible-widescreen{visibility:hidden !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-invisible-widescreen-only{visibility:hidden !important}}@media screen and (min-width: 1408px){.is-invisible-fullhd{visibility:hidden !important}}html.theme--documenter-dark{/*! 
+ Theme: a11y-dark + Author: @ericwbailey + Maintainer: @ericwbailey + + Based on the Tomorrow Night Eighties theme: https://github.com/isagalaev/highlight.js/blob/master/src/styles/tomorrow-night-eighties.css +*/}html.theme--documenter-dark html{background-color:#1f2424;font-size:16px;-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;min-width:300px;overflow-x:auto;overflow-y:scroll;text-rendering:optimizeLegibility;text-size-adjust:100%}html.theme--documenter-dark article,html.theme--documenter-dark aside,html.theme--documenter-dark figure,html.theme--documenter-dark footer,html.theme--documenter-dark header,html.theme--documenter-dark hgroup,html.theme--documenter-dark section{display:block}html.theme--documenter-dark body,html.theme--documenter-dark button,html.theme--documenter-dark input,html.theme--documenter-dark optgroup,html.theme--documenter-dark select,html.theme--documenter-dark textarea{font-family:"Lato Medium",-apple-system,BlinkMacSystemFont,"Segoe UI","Helvetica Neue","Helvetica","Arial",sans-serif}html.theme--documenter-dark code,html.theme--documenter-dark pre{-moz-osx-font-smoothing:auto;-webkit-font-smoothing:auto;font-family:"JuliaMono","SFMono-Regular","Menlo","Consolas","Liberation Mono","DejaVu Sans Mono",monospace}html.theme--documenter-dark body{color:#fff;font-size:1em;font-weight:400;line-height:1.5}html.theme--documenter-dark a{color:#1abc9c;cursor:pointer;text-decoration:none}html.theme--documenter-dark a strong{color:currentColor}html.theme--documenter-dark a:hover{color:#1dd2af}html.theme--documenter-dark code{background-color:rgba(255,255,255,0.05);color:#ececec;font-size:.875em;font-weight:normal;padding:.1em}html.theme--documenter-dark hr{background-color:#282f2f;border:none;display:block;height:2px;margin:1.5rem 0}html.theme--documenter-dark img{height:auto;max-width:100%}html.theme--documenter-dark input[type="checkbox"],html.theme--documenter-dark input[type="radio"]{vertical-align:baseline}html.theme--documenter-dark small{font-size:.875em}html.theme--documenter-dark span{font-style:inherit;font-weight:inherit}html.theme--documenter-dark strong{color:#f2f2f2;font-weight:700}html.theme--documenter-dark fieldset{border:none}html.theme--documenter-dark pre{-webkit-overflow-scrolling:touch;background-color:#282f2f;color:#fff;font-size:.875em;overflow-x:auto;padding:1.25rem 1.5rem;white-space:pre;word-wrap:normal}html.theme--documenter-dark pre code{background-color:transparent;color:currentColor;font-size:1em;padding:0}html.theme--documenter-dark table td,html.theme--documenter-dark table th{vertical-align:top}html.theme--documenter-dark table td:not([align]),html.theme--documenter-dark table th:not([align]){text-align:inherit}html.theme--documenter-dark table th{color:#f2f2f2}html.theme--documenter-dark .box{background-color:#343c3d;border-radius:8px;box-shadow:none;color:#fff;display:block;padding:1.25rem}html.theme--documenter-dark a.box:hover,html.theme--documenter-dark a.box:focus{box-shadow:0 0.5em 1em -0.125em rgba(10,10,10,0.1),0 0 0 1px #1abc9c}html.theme--documenter-dark a.box:active{box-shadow:inset 0 1px 2px rgba(10,10,10,0.2),0 0 0 1px #1abc9c}html.theme--documenter-dark .button{background-color:#282f2f;border-color:#4c5759;border-width:1px;color:#375a7f;cursor:pointer;justify-content:center;padding-bottom:calc(0.5em - 1px);padding-left:1em;padding-right:1em;padding-top:calc(0.5em - 1px);text-align:center;white-space:nowrap}html.theme--documenter-dark .button strong{color:inherit}html.theme--documenter-dark .button 
.icon,html.theme--documenter-dark .button .icon.is-small,html.theme--documenter-dark .button #documenter .docs-sidebar form.docs-search>input.icon,html.theme--documenter-dark #documenter .docs-sidebar .button form.docs-search>input.icon,html.theme--documenter-dark .button .icon.is-medium,html.theme--documenter-dark .button .icon.is-large{height:1.5em;width:1.5em}html.theme--documenter-dark .button .icon:first-child:not(:last-child){margin-left:calc(-0.5em - 1px);margin-right:.25em}html.theme--documenter-dark .button .icon:last-child:not(:first-child){margin-left:.25em;margin-right:calc(-0.5em - 1px)}html.theme--documenter-dark .button .icon:first-child:last-child{margin-left:calc(-0.5em - 1px);margin-right:calc(-0.5em - 1px)}html.theme--documenter-dark .button:hover,html.theme--documenter-dark .button.is-hovered{border-color:#8c9b9d;color:#f2f2f2}html.theme--documenter-dark .button:focus,html.theme--documenter-dark .button.is-focused{border-color:#8c9b9d;color:#17a689}html.theme--documenter-dark .button:focus:not(:active),html.theme--documenter-dark .button.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(26,188,156,0.25)}html.theme--documenter-dark .button:active,html.theme--documenter-dark .button.is-active{border-color:#343c3d;color:#f2f2f2}html.theme--documenter-dark .button.is-text{background-color:transparent;border-color:transparent;color:#fff;text-decoration:underline}html.theme--documenter-dark .button.is-text:hover,html.theme--documenter-dark .button.is-text.is-hovered,html.theme--documenter-dark .button.is-text:focus,html.theme--documenter-dark .button.is-text.is-focused{background-color:#282f2f;color:#f2f2f2}html.theme--documenter-dark .button.is-text:active,html.theme--documenter-dark .button.is-text.is-active{background-color:#1d2122;color:#f2f2f2}html.theme--documenter-dark .button.is-text[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-text{background-color:transparent;border-color:transparent;box-shadow:none}html.theme--documenter-dark .button.is-ghost{background:none;border-color:rgba(0,0,0,0);color:#1abc9c;text-decoration:none}html.theme--documenter-dark .button.is-ghost:hover,html.theme--documenter-dark .button.is-ghost.is-hovered{color:#1abc9c;text-decoration:underline}html.theme--documenter-dark .button.is-white{background-color:#fff;border-color:transparent;color:#0a0a0a}html.theme--documenter-dark .button.is-white:hover,html.theme--documenter-dark .button.is-white.is-hovered{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}html.theme--documenter-dark .button.is-white:focus,html.theme--documenter-dark .button.is-white.is-focused{border-color:transparent;color:#0a0a0a}html.theme--documenter-dark .button.is-white:focus:not(:active),html.theme--documenter-dark .button.is-white.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(255,255,255,0.25)}html.theme--documenter-dark .button.is-white:active,html.theme--documenter-dark .button.is-white.is-active{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}html.theme--documenter-dark .button.is-white[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-white{background-color:#fff;border-color:#fff;box-shadow:none}html.theme--documenter-dark .button.is-white.is-inverted{background-color:#0a0a0a;color:#fff}html.theme--documenter-dark .button.is-white.is-inverted:hover,html.theme--documenter-dark .button.is-white.is-inverted.is-hovered{background-color:#000}html.theme--documenter-dark .button.is-white.is-inverted[disabled],fieldset[disabled] 
html.theme--documenter-dark .button.is-white.is-inverted{background-color:#0a0a0a;border-color:transparent;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-white.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a !important}html.theme--documenter-dark .button.is-white.is-outlined{background-color:transparent;border-color:#fff;color:#fff}html.theme--documenter-dark .button.is-white.is-outlined:hover,html.theme--documenter-dark .button.is-white.is-outlined.is-hovered,html.theme--documenter-dark .button.is-white.is-outlined:focus,html.theme--documenter-dark .button.is-white.is-outlined.is-focused{background-color:#fff;border-color:#fff;color:#0a0a0a}html.theme--documenter-dark .button.is-white.is-outlined.is-loading::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-white.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-white.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-white.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-white.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #0a0a0a #0a0a0a !important}html.theme--documenter-dark .button.is-white.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-white.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}html.theme--documenter-dark .button.is-white.is-inverted.is-outlined:hover,html.theme--documenter-dark .button.is-white.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .button.is-white.is-inverted.is-outlined:focus,html.theme--documenter-dark .button.is-white.is-inverted.is-outlined.is-focused{background-color:#0a0a0a;color:#fff}html.theme--documenter-dark .button.is-white.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-white.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-white.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-white.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-white.is-inverted.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}html.theme--documenter-dark .button.is-black{background-color:#0a0a0a;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-black:hover,html.theme--documenter-dark .button.is-black.is-hovered{background-color:#040404;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-black:focus,html.theme--documenter-dark .button.is-black.is-focused{border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-black:focus:not(:active),html.theme--documenter-dark .button.is-black.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(10,10,10,0.25)}html.theme--documenter-dark .button.is-black:active,html.theme--documenter-dark .button.is-black.is-active{background-color:#000;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-black[disabled],fieldset[disabled] html.theme--documenter-dark 
.button.is-black{background-color:#0a0a0a;border-color:#0a0a0a;box-shadow:none}html.theme--documenter-dark .button.is-black.is-inverted{background-color:#fff;color:#0a0a0a}html.theme--documenter-dark .button.is-black.is-inverted:hover,html.theme--documenter-dark .button.is-black.is-inverted.is-hovered{background-color:#f2f2f2}html.theme--documenter-dark .button.is-black.is-inverted[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-black.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#0a0a0a}html.theme--documenter-dark .button.is-black.is-loading::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}html.theme--documenter-dark .button.is-black.is-outlined:hover,html.theme--documenter-dark .button.is-black.is-outlined.is-hovered,html.theme--documenter-dark .button.is-black.is-outlined:focus,html.theme--documenter-dark .button.is-black.is-outlined.is-focused{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}html.theme--documenter-dark .button.is-black.is-outlined.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a !important}html.theme--documenter-dark .button.is-black.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-black.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-black.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-black.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-black.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}html.theme--documenter-dark .button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}html.theme--documenter-dark .button.is-black.is-inverted.is-outlined:hover,html.theme--documenter-dark .button.is-black.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .button.is-black.is-inverted.is-outlined:focus,html.theme--documenter-dark .button.is-black.is-inverted.is-outlined.is-focused{background-color:#fff;color:#0a0a0a}html.theme--documenter-dark .button.is-black.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-black.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-black.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-black.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #0a0a0a #0a0a0a !important}html.theme--documenter-dark .button.is-black.is-inverted.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-light{background-color:#ecf0f1;border-color:transparent;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .button.is-light:hover,html.theme--documenter-dark .button.is-light.is-hovered{background-color:#e5eaec;border-color:transparent;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .button.is-light:focus,html.theme--documenter-dark .button.is-light.is-focused{border-color:transparent;color:rgba(0,0,0,0.7)}html.theme--documenter-dark 
.button.is-light:focus:not(:active),html.theme--documenter-dark .button.is-light.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(236,240,241,0.25)}html.theme--documenter-dark .button.is-light:active,html.theme--documenter-dark .button.is-light.is-active{background-color:#dde4e6;border-color:transparent;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .button.is-light[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-light{background-color:#ecf0f1;border-color:#ecf0f1;box-shadow:none}html.theme--documenter-dark .button.is-light.is-inverted{background-color:rgba(0,0,0,0.7);color:#ecf0f1}html.theme--documenter-dark .button.is-light.is-inverted:hover,html.theme--documenter-dark .button.is-light.is-inverted.is-hovered{background-color:rgba(0,0,0,0.7)}html.theme--documenter-dark .button.is-light.is-inverted[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-light.is-inverted{background-color:rgba(0,0,0,0.7);border-color:transparent;box-shadow:none;color:#ecf0f1}html.theme--documenter-dark .button.is-light.is-loading::after{border-color:transparent transparent rgba(0,0,0,0.7) rgba(0,0,0,0.7) !important}html.theme--documenter-dark .button.is-light.is-outlined{background-color:transparent;border-color:#ecf0f1;color:#ecf0f1}html.theme--documenter-dark .button.is-light.is-outlined:hover,html.theme--documenter-dark .button.is-light.is-outlined.is-hovered,html.theme--documenter-dark .button.is-light.is-outlined:focus,html.theme--documenter-dark .button.is-light.is-outlined.is-focused{background-color:#ecf0f1;border-color:#ecf0f1;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .button.is-light.is-outlined.is-loading::after{border-color:transparent transparent #ecf0f1 #ecf0f1 !important}html.theme--documenter-dark .button.is-light.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-light.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-light.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-light.is-outlined.is-loading.is-focused::after{border-color:transparent transparent rgba(0,0,0,0.7) rgba(0,0,0,0.7) !important}html.theme--documenter-dark .button.is-light.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-light.is-outlined{background-color:transparent;border-color:#ecf0f1;box-shadow:none;color:#ecf0f1}html.theme--documenter-dark .button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,0.7);color:rgba(0,0,0,0.7)}html.theme--documenter-dark .button.is-light.is-inverted.is-outlined:hover,html.theme--documenter-dark .button.is-light.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .button.is-light.is-inverted.is-outlined:focus,html.theme--documenter-dark .button.is-light.is-inverted.is-outlined.is-focused{background-color:rgba(0,0,0,0.7);color:#ecf0f1}html.theme--documenter-dark .button.is-light.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-light.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-light.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-light.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #ecf0f1 #ecf0f1 !important}html.theme--documenter-dark .button.is-light.is-inverted.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark 
.button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,0.7);box-shadow:none;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .button.is-dark,html.theme--documenter-dark .content kbd.button{background-color:#282f2f;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-dark:hover,html.theme--documenter-dark .content kbd.button:hover,html.theme--documenter-dark .button.is-dark.is-hovered,html.theme--documenter-dark .content kbd.button.is-hovered{background-color:#232829;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-dark:focus,html.theme--documenter-dark .content kbd.button:focus,html.theme--documenter-dark .button.is-dark.is-focused,html.theme--documenter-dark .content kbd.button.is-focused{border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-dark:focus:not(:active),html.theme--documenter-dark .content kbd.button:focus:not(:active),html.theme--documenter-dark .button.is-dark.is-focused:not(:active),html.theme--documenter-dark .content kbd.button.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(40,47,47,0.25)}html.theme--documenter-dark .button.is-dark:active,html.theme--documenter-dark .content kbd.button:active,html.theme--documenter-dark .button.is-dark.is-active,html.theme--documenter-dark .content kbd.button.is-active{background-color:#1d2122;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-dark[disabled],html.theme--documenter-dark .content kbd.button[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-dark,fieldset[disabled] html.theme--documenter-dark .content kbd.button{background-color:#282f2f;border-color:#282f2f;box-shadow:none}html.theme--documenter-dark .button.is-dark.is-inverted,html.theme--documenter-dark .content kbd.button.is-inverted{background-color:#fff;color:#282f2f}html.theme--documenter-dark .button.is-dark.is-inverted:hover,html.theme--documenter-dark .content kbd.button.is-inverted:hover,html.theme--documenter-dark .button.is-dark.is-inverted.is-hovered,html.theme--documenter-dark .content kbd.button.is-inverted.is-hovered{background-color:#f2f2f2}html.theme--documenter-dark .button.is-dark.is-inverted[disabled],html.theme--documenter-dark .content kbd.button.is-inverted[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-dark.is-inverted,fieldset[disabled] html.theme--documenter-dark .content kbd.button.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#282f2f}html.theme--documenter-dark .button.is-dark.is-loading::after,html.theme--documenter-dark .content kbd.button.is-loading::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-dark.is-outlined,html.theme--documenter-dark .content kbd.button.is-outlined{background-color:transparent;border-color:#282f2f;color:#282f2f}html.theme--documenter-dark .button.is-dark.is-outlined:hover,html.theme--documenter-dark .content kbd.button.is-outlined:hover,html.theme--documenter-dark .button.is-dark.is-outlined.is-hovered,html.theme--documenter-dark .content kbd.button.is-outlined.is-hovered,html.theme--documenter-dark .button.is-dark.is-outlined:focus,html.theme--documenter-dark .content kbd.button.is-outlined:focus,html.theme--documenter-dark .button.is-dark.is-outlined.is-focused,html.theme--documenter-dark .content kbd.button.is-outlined.is-focused{background-color:#282f2f;border-color:#282f2f;color:#fff}html.theme--documenter-dark 
.button.is-dark.is-outlined.is-loading::after,html.theme--documenter-dark .content kbd.button.is-outlined.is-loading::after{border-color:transparent transparent #282f2f #282f2f !important}html.theme--documenter-dark .button.is-dark.is-outlined.is-loading:hover::after,html.theme--documenter-dark .content kbd.button.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-dark.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .content kbd.button.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-dark.is-outlined.is-loading:focus::after,html.theme--documenter-dark .content kbd.button.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-dark.is-outlined.is-loading.is-focused::after,html.theme--documenter-dark .content kbd.button.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-dark.is-outlined[disabled],html.theme--documenter-dark .content kbd.button.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-dark.is-outlined,fieldset[disabled] html.theme--documenter-dark .content kbd.button.is-outlined{background-color:transparent;border-color:#282f2f;box-shadow:none;color:#282f2f}html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined,html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined:hover,html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined:hover,html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined:focus,html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined:focus,html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined.is-focused,html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined.is-focused{background-color:#fff;color:#282f2f}html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined.is-loading.is-focused::after,html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #282f2f #282f2f !important}html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined[disabled],html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-dark.is-inverted.is-outlined,fieldset[disabled] html.theme--documenter-dark .content kbd.button.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-primary,html.theme--documenter-dark 
.docstring>section>a.button.docs-sourcelink{background-color:#375a7f;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-primary:hover,html.theme--documenter-dark .docstring>section>a.button.docs-sourcelink:hover,html.theme--documenter-dark .button.is-primary.is-hovered,html.theme--documenter-dark .docstring>section>a.button.is-hovered.docs-sourcelink{background-color:#335476;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-primary:focus,html.theme--documenter-dark .docstring>section>a.button.docs-sourcelink:focus,html.theme--documenter-dark .button.is-primary.is-focused,html.theme--documenter-dark .docstring>section>a.button.is-focused.docs-sourcelink{border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-primary:focus:not(:active),html.theme--documenter-dark .docstring>section>a.button.docs-sourcelink:focus:not(:active),html.theme--documenter-dark .button.is-primary.is-focused:not(:active),html.theme--documenter-dark .docstring>section>a.button.is-focused.docs-sourcelink:not(:active){box-shadow:0 0 0 0.125em rgba(55,90,127,0.25)}html.theme--documenter-dark .button.is-primary:active,html.theme--documenter-dark .docstring>section>a.button.docs-sourcelink:active,html.theme--documenter-dark .button.is-primary.is-active,html.theme--documenter-dark .docstring>section>a.button.is-active.docs-sourcelink{background-color:#2f4d6d;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-primary[disabled],html.theme--documenter-dark .docstring>section>a.button.docs-sourcelink[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-primary,fieldset[disabled] html.theme--documenter-dark .docstring>section>a.button.docs-sourcelink{background-color:#375a7f;border-color:#375a7f;box-shadow:none}html.theme--documenter-dark .button.is-primary.is-inverted,html.theme--documenter-dark .docstring>section>a.button.is-inverted.docs-sourcelink{background-color:#fff;color:#375a7f}html.theme--documenter-dark .button.is-primary.is-inverted:hover,html.theme--documenter-dark .docstring>section>a.button.is-inverted.docs-sourcelink:hover,html.theme--documenter-dark .button.is-primary.is-inverted.is-hovered,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-hovered.docs-sourcelink{background-color:#f2f2f2}html.theme--documenter-dark .button.is-primary.is-inverted[disabled],html.theme--documenter-dark .docstring>section>a.button.is-inverted.docs-sourcelink[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-primary.is-inverted,fieldset[disabled] html.theme--documenter-dark .docstring>section>a.button.is-inverted.docs-sourcelink{background-color:#fff;border-color:transparent;box-shadow:none;color:#375a7f}html.theme--documenter-dark .button.is-primary.is-loading::after,html.theme--documenter-dark .docstring>section>a.button.is-loading.docs-sourcelink::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-primary.is-outlined,html.theme--documenter-dark .docstring>section>a.button.is-outlined.docs-sourcelink{background-color:transparent;border-color:#375a7f;color:#375a7f}html.theme--documenter-dark .button.is-primary.is-outlined:hover,html.theme--documenter-dark .docstring>section>a.button.is-outlined.docs-sourcelink:hover,html.theme--documenter-dark .button.is-primary.is-outlined.is-hovered,html.theme--documenter-dark .docstring>section>a.button.is-outlined.is-hovered.docs-sourcelink,html.theme--documenter-dark 
.button.is-primary.is-outlined:focus,html.theme--documenter-dark .docstring>section>a.button.is-outlined.docs-sourcelink:focus,html.theme--documenter-dark .button.is-primary.is-outlined.is-focused,html.theme--documenter-dark .docstring>section>a.button.is-outlined.is-focused.docs-sourcelink{background-color:#375a7f;border-color:#375a7f;color:#fff}html.theme--documenter-dark .button.is-primary.is-outlined.is-loading::after,html.theme--documenter-dark .docstring>section>a.button.is-outlined.is-loading.docs-sourcelink::after{border-color:transparent transparent #375a7f #375a7f !important}html.theme--documenter-dark .button.is-primary.is-outlined.is-loading:hover::after,html.theme--documenter-dark .docstring>section>a.button.is-outlined.is-loading.docs-sourcelink:hover::after,html.theme--documenter-dark .button.is-primary.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .docstring>section>a.button.is-outlined.is-loading.is-hovered.docs-sourcelink::after,html.theme--documenter-dark .button.is-primary.is-outlined.is-loading:focus::after,html.theme--documenter-dark .docstring>section>a.button.is-outlined.is-loading.docs-sourcelink:focus::after,html.theme--documenter-dark .button.is-primary.is-outlined.is-loading.is-focused::after,html.theme--documenter-dark .docstring>section>a.button.is-outlined.is-loading.is-focused.docs-sourcelink::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-primary.is-outlined[disabled],html.theme--documenter-dark .docstring>section>a.button.is-outlined.docs-sourcelink[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-primary.is-outlined,fieldset[disabled] html.theme--documenter-dark .docstring>section>a.button.is-outlined.docs-sourcelink{background-color:transparent;border-color:#375a7f;box-shadow:none;color:#375a7f}html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink{background-color:transparent;border-color:#fff;color:#fff}html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined:hover,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink:hover,html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.is-hovered.docs-sourcelink,html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined:focus,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink:focus,html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined.is-focused,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.is-focused.docs-sourcelink{background-color:#fff;color:#375a7f}html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.is-loading.docs-sourcelink:hover::after,html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.is-loading.is-hovered.docs-sourcelink::after,html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.is-loading.docs-sourcelink:focus::after,html.theme--documenter-dark 
.button.is-primary.is-inverted.is-outlined.is-loading.is-focused::after,html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.is-loading.is-focused.docs-sourcelink::after{border-color:transparent transparent #375a7f #375a7f !important}html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined[disabled],html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-primary.is-inverted.is-outlined,fieldset[disabled] html.theme--documenter-dark .docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-primary.is-light,html.theme--documenter-dark .docstring>section>a.button.is-light.docs-sourcelink{background-color:#f1f5f9;color:#4d7eb2}html.theme--documenter-dark .button.is-primary.is-light:hover,html.theme--documenter-dark .docstring>section>a.button.is-light.docs-sourcelink:hover,html.theme--documenter-dark .button.is-primary.is-light.is-hovered,html.theme--documenter-dark .docstring>section>a.button.is-light.is-hovered.docs-sourcelink{background-color:#e8eef5;border-color:transparent;color:#4d7eb2}html.theme--documenter-dark .button.is-primary.is-light:active,html.theme--documenter-dark .docstring>section>a.button.is-light.docs-sourcelink:active,html.theme--documenter-dark .button.is-primary.is-light.is-active,html.theme--documenter-dark .docstring>section>a.button.is-light.is-active.docs-sourcelink{background-color:#dfe8f1;border-color:transparent;color:#4d7eb2}html.theme--documenter-dark .button.is-link{background-color:#1abc9c;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-link:hover,html.theme--documenter-dark .button.is-link.is-hovered{background-color:#18b193;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-link:focus,html.theme--documenter-dark .button.is-link.is-focused{border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-link:focus:not(:active),html.theme--documenter-dark .button.is-link.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(26,188,156,0.25)}html.theme--documenter-dark .button.is-link:active,html.theme--documenter-dark .button.is-link.is-active{background-color:#17a689;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-link[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-link{background-color:#1abc9c;border-color:#1abc9c;box-shadow:none}html.theme--documenter-dark .button.is-link.is-inverted{background-color:#fff;color:#1abc9c}html.theme--documenter-dark .button.is-link.is-inverted:hover,html.theme--documenter-dark .button.is-link.is-inverted.is-hovered{background-color:#f2f2f2}html.theme--documenter-dark .button.is-link.is-inverted[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-link.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#1abc9c}html.theme--documenter-dark .button.is-link.is-loading::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-link.is-outlined{background-color:transparent;border-color:#1abc9c;color:#1abc9c}html.theme--documenter-dark .button.is-link.is-outlined:hover,html.theme--documenter-dark .button.is-link.is-outlined.is-hovered,html.theme--documenter-dark .button.is-link.is-outlined:focus,html.theme--documenter-dark 
.button.is-link.is-outlined.is-focused{background-color:#1abc9c;border-color:#1abc9c;color:#fff}html.theme--documenter-dark .button.is-link.is-outlined.is-loading::after{border-color:transparent transparent #1abc9c #1abc9c !important}html.theme--documenter-dark .button.is-link.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-link.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-link.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-link.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-link.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-link.is-outlined{background-color:transparent;border-color:#1abc9c;box-shadow:none;color:#1abc9c}html.theme--documenter-dark .button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}html.theme--documenter-dark .button.is-link.is-inverted.is-outlined:hover,html.theme--documenter-dark .button.is-link.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .button.is-link.is-inverted.is-outlined:focus,html.theme--documenter-dark .button.is-link.is-inverted.is-outlined.is-focused{background-color:#fff;color:#1abc9c}html.theme--documenter-dark .button.is-link.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-link.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-link.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-link.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #1abc9c #1abc9c !important}html.theme--documenter-dark .button.is-link.is-inverted.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-link.is-light{background-color:#edfdf9;color:#15987e}html.theme--documenter-dark .button.is-link.is-light:hover,html.theme--documenter-dark .button.is-link.is-light.is-hovered{background-color:#e2fbf6;border-color:transparent;color:#15987e}html.theme--documenter-dark .button.is-link.is-light:active,html.theme--documenter-dark .button.is-link.is-light.is-active{background-color:#d7f9f3;border-color:transparent;color:#15987e}html.theme--documenter-dark .button.is-info{background-color:#024c7d;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-info:hover,html.theme--documenter-dark .button.is-info.is-hovered{background-color:#024470;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-info:focus,html.theme--documenter-dark .button.is-info.is-focused{border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-info:focus:not(:active),html.theme--documenter-dark .button.is-info.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(2,76,125,0.25)}html.theme--documenter-dark .button.is-info:active,html.theme--documenter-dark .button.is-info.is-active{background-color:#023d64;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-info[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-info{background-color:#024c7d;border-color:#024c7d;box-shadow:none}html.theme--documenter-dark .button.is-info.is-inverted{background-color:#fff;color:#024c7d}html.theme--documenter-dark 
.button.is-info.is-inverted:hover,html.theme--documenter-dark .button.is-info.is-inverted.is-hovered{background-color:#f2f2f2}html.theme--documenter-dark .button.is-info.is-inverted[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-info.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#024c7d}html.theme--documenter-dark .button.is-info.is-loading::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-info.is-outlined{background-color:transparent;border-color:#024c7d;color:#024c7d}html.theme--documenter-dark .button.is-info.is-outlined:hover,html.theme--documenter-dark .button.is-info.is-outlined.is-hovered,html.theme--documenter-dark .button.is-info.is-outlined:focus,html.theme--documenter-dark .button.is-info.is-outlined.is-focused{background-color:#024c7d;border-color:#024c7d;color:#fff}html.theme--documenter-dark .button.is-info.is-outlined.is-loading::after{border-color:transparent transparent #024c7d #024c7d !important}html.theme--documenter-dark .button.is-info.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-info.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-info.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-info.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-info.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-info.is-outlined{background-color:transparent;border-color:#024c7d;box-shadow:none;color:#024c7d}html.theme--documenter-dark .button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}html.theme--documenter-dark .button.is-info.is-inverted.is-outlined:hover,html.theme--documenter-dark .button.is-info.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .button.is-info.is-inverted.is-outlined:focus,html.theme--documenter-dark .button.is-info.is-inverted.is-outlined.is-focused{background-color:#fff;color:#024c7d}html.theme--documenter-dark .button.is-info.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-info.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-info.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-info.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #024c7d #024c7d !important}html.theme--documenter-dark .button.is-info.is-inverted.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-info.is-light{background-color:#ebf7ff;color:#0e9dfb}html.theme--documenter-dark .button.is-info.is-light:hover,html.theme--documenter-dark .button.is-info.is-light.is-hovered{background-color:#def2fe;border-color:transparent;color:#0e9dfb}html.theme--documenter-dark .button.is-info.is-light:active,html.theme--documenter-dark .button.is-info.is-light.is-active{background-color:#d2edfe;border-color:transparent;color:#0e9dfb}html.theme--documenter-dark .button.is-success{background-color:#008438;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-success:hover,html.theme--documenter-dark 
.button.is-success.is-hovered{background-color:#073;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-success:focus,html.theme--documenter-dark .button.is-success.is-focused{border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-success:focus:not(:active),html.theme--documenter-dark .button.is-success.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(0,132,56,0.25)}html.theme--documenter-dark .button.is-success:active,html.theme--documenter-dark .button.is-success.is-active{background-color:#006b2d;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-success[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-success{background-color:#008438;border-color:#008438;box-shadow:none}html.theme--documenter-dark .button.is-success.is-inverted{background-color:#fff;color:#008438}html.theme--documenter-dark .button.is-success.is-inverted:hover,html.theme--documenter-dark .button.is-success.is-inverted.is-hovered{background-color:#f2f2f2}html.theme--documenter-dark .button.is-success.is-inverted[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-success.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#008438}html.theme--documenter-dark .button.is-success.is-loading::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-success.is-outlined{background-color:transparent;border-color:#008438;color:#008438}html.theme--documenter-dark .button.is-success.is-outlined:hover,html.theme--documenter-dark .button.is-success.is-outlined.is-hovered,html.theme--documenter-dark .button.is-success.is-outlined:focus,html.theme--documenter-dark .button.is-success.is-outlined.is-focused{background-color:#008438;border-color:#008438;color:#fff}html.theme--documenter-dark .button.is-success.is-outlined.is-loading::after{border-color:transparent transparent #008438 #008438 !important}html.theme--documenter-dark .button.is-success.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-success.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-success.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-success.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-success.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-success.is-outlined{background-color:transparent;border-color:#008438;box-shadow:none;color:#008438}html.theme--documenter-dark .button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}html.theme--documenter-dark .button.is-success.is-inverted.is-outlined:hover,html.theme--documenter-dark .button.is-success.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .button.is-success.is-inverted.is-outlined:focus,html.theme--documenter-dark .button.is-success.is-inverted.is-outlined.is-focused{background-color:#fff;color:#008438}html.theme--documenter-dark .button.is-success.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-success.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-success.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-success.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #008438 #008438 
!important}html.theme--documenter-dark .button.is-success.is-inverted.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-success.is-light{background-color:#ebfff3;color:#00eb64}html.theme--documenter-dark .button.is-success.is-light:hover,html.theme--documenter-dark .button.is-success.is-light.is-hovered{background-color:#deffec;border-color:transparent;color:#00eb64}html.theme--documenter-dark .button.is-success.is-light:active,html.theme--documenter-dark .button.is-success.is-light.is-active{background-color:#d1ffe5;border-color:transparent;color:#00eb64}html.theme--documenter-dark .button.is-warning{background-color:#ad8100;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-warning:hover,html.theme--documenter-dark .button.is-warning.is-hovered{background-color:#a07700;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-warning:focus,html.theme--documenter-dark .button.is-warning.is-focused{border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-warning:focus:not(:active),html.theme--documenter-dark .button.is-warning.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(173,129,0,0.25)}html.theme--documenter-dark .button.is-warning:active,html.theme--documenter-dark .button.is-warning.is-active{background-color:#946e00;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-warning[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-warning{background-color:#ad8100;border-color:#ad8100;box-shadow:none}html.theme--documenter-dark .button.is-warning.is-inverted{background-color:#fff;color:#ad8100}html.theme--documenter-dark .button.is-warning.is-inverted:hover,html.theme--documenter-dark .button.is-warning.is-inverted.is-hovered{background-color:#f2f2f2}html.theme--documenter-dark .button.is-warning.is-inverted[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-warning.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#ad8100}html.theme--documenter-dark .button.is-warning.is-loading::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-warning.is-outlined{background-color:transparent;border-color:#ad8100;color:#ad8100}html.theme--documenter-dark .button.is-warning.is-outlined:hover,html.theme--documenter-dark .button.is-warning.is-outlined.is-hovered,html.theme--documenter-dark .button.is-warning.is-outlined:focus,html.theme--documenter-dark .button.is-warning.is-outlined.is-focused{background-color:#ad8100;border-color:#ad8100;color:#fff}html.theme--documenter-dark .button.is-warning.is-outlined.is-loading::after{border-color:transparent transparent #ad8100 #ad8100 !important}html.theme--documenter-dark .button.is-warning.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-warning.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-warning.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-warning.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-warning.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-warning.is-outlined{background-color:transparent;border-color:#ad8100;box-shadow:none;color:#ad8100}html.theme--documenter-dark 
.button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined:hover,html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined:focus,html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined.is-focused{background-color:#fff;color:#ad8100}html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #ad8100 #ad8100 !important}html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-warning.is-light{background-color:#fffaeb;color:#d19c00}html.theme--documenter-dark .button.is-warning.is-light:hover,html.theme--documenter-dark .button.is-warning.is-light.is-hovered{background-color:#fff7de;border-color:transparent;color:#d19c00}html.theme--documenter-dark .button.is-warning.is-light:active,html.theme--documenter-dark .button.is-warning.is-light.is-active{background-color:#fff3d1;border-color:transparent;color:#d19c00}html.theme--documenter-dark .button.is-danger{background-color:#9e1b0d;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-danger:hover,html.theme--documenter-dark .button.is-danger.is-hovered{background-color:#92190c;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-danger:focus,html.theme--documenter-dark .button.is-danger.is-focused{border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-danger:focus:not(:active),html.theme--documenter-dark .button.is-danger.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(158,27,13,0.25)}html.theme--documenter-dark .button.is-danger:active,html.theme--documenter-dark .button.is-danger.is-active{background-color:#86170b;border-color:transparent;color:#fff}html.theme--documenter-dark .button.is-danger[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-danger{background-color:#9e1b0d;border-color:#9e1b0d;box-shadow:none}html.theme--documenter-dark .button.is-danger.is-inverted{background-color:#fff;color:#9e1b0d}html.theme--documenter-dark .button.is-danger.is-inverted:hover,html.theme--documenter-dark .button.is-danger.is-inverted.is-hovered{background-color:#f2f2f2}html.theme--documenter-dark .button.is-danger.is-inverted[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-danger.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#9e1b0d}html.theme--documenter-dark .button.is-danger.is-loading::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-danger.is-outlined{background-color:transparent;border-color:#9e1b0d;color:#9e1b0d}html.theme--documenter-dark .button.is-danger.is-outlined:hover,html.theme--documenter-dark .button.is-danger.is-outlined.is-hovered,html.theme--documenter-dark .button.is-danger.is-outlined:focus,html.theme--documenter-dark 
.button.is-danger.is-outlined.is-focused{background-color:#9e1b0d;border-color:#9e1b0d;color:#fff}html.theme--documenter-dark .button.is-danger.is-outlined.is-loading::after{border-color:transparent transparent #9e1b0d #9e1b0d !important}html.theme--documenter-dark .button.is-danger.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-danger.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-danger.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-danger.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}html.theme--documenter-dark .button.is-danger.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-danger.is-outlined{background-color:transparent;border-color:#9e1b0d;box-shadow:none;color:#9e1b0d}html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined:hover,html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined.is-hovered,html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined:focus,html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined.is-focused{background-color:#fff;color:#9e1b0d}html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined.is-loading:hover::after,html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined.is-loading.is-hovered::after,html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined.is-loading:focus::after,html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #9e1b0d #9e1b0d !important}html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined[disabled],fieldset[disabled] html.theme--documenter-dark .button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}html.theme--documenter-dark .button.is-danger.is-light{background-color:#fdeeec;color:#ec311d}html.theme--documenter-dark .button.is-danger.is-light:hover,html.theme--documenter-dark .button.is-danger.is-light.is-hovered{background-color:#fce3e0;border-color:transparent;color:#ec311d}html.theme--documenter-dark .button.is-danger.is-light:active,html.theme--documenter-dark .button.is-danger.is-light.is-active{background-color:#fcd8d5;border-color:transparent;color:#ec311d}html.theme--documenter-dark .button.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.button{font-size:.75rem}html.theme--documenter-dark .button.is-small:not(.is-rounded),html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.button:not(.is-rounded){border-radius:3px}html.theme--documenter-dark .button.is-normal{font-size:1rem}html.theme--documenter-dark .button.is-medium{font-size:1.25rem}html.theme--documenter-dark .button.is-large{font-size:1.5rem}html.theme--documenter-dark .button[disabled],fieldset[disabled] html.theme--documenter-dark .button{background-color:#8c9b9d;border-color:#5e6d6f;box-shadow:none;opacity:.5}html.theme--documenter-dark .button.is-fullwidth{display:flex;width:100%}html.theme--documenter-dark .button.is-loading{color:transparent !important;pointer-events:none}html.theme--documenter-dark .button.is-loading::after{position:absolute;left:calc(50% - (1em * 0.5));top:calc(50% - (1em * 0.5));position:absolute !important}html.theme--documenter-dark 
.button.is-static{background-color:#282f2f;border-color:#5e6d6f;color:#dbdee0;box-shadow:none;pointer-events:none}html.theme--documenter-dark .button.is-rounded,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.button{border-radius:9999px;padding-left:calc(1em + 0.25em);padding-right:calc(1em + 0.25em)}html.theme--documenter-dark .buttons{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}html.theme--documenter-dark .buttons .button{margin-bottom:0.5rem}html.theme--documenter-dark .buttons .button:not(:last-child):not(.is-fullwidth){margin-right:.5rem}html.theme--documenter-dark .buttons:last-child{margin-bottom:-0.5rem}html.theme--documenter-dark .buttons:not(:last-child){margin-bottom:1rem}html.theme--documenter-dark .buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large){font-size:.75rem}html.theme--documenter-dark .buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large):not(.is-rounded){border-radius:3px}html.theme--documenter-dark .buttons.are-medium .button:not(.is-small):not(.is-normal):not(.is-large){font-size:1.25rem}html.theme--documenter-dark .buttons.are-large .button:not(.is-small):not(.is-normal):not(.is-medium){font-size:1.5rem}html.theme--documenter-dark .buttons.has-addons .button:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}html.theme--documenter-dark .buttons.has-addons .button:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0;margin-right:-1px}html.theme--documenter-dark .buttons.has-addons .button:last-child{margin-right:0}html.theme--documenter-dark .buttons.has-addons .button:hover,html.theme--documenter-dark .buttons.has-addons .button.is-hovered{z-index:2}html.theme--documenter-dark .buttons.has-addons .button:focus,html.theme--documenter-dark .buttons.has-addons .button.is-focused,html.theme--documenter-dark .buttons.has-addons .button:active,html.theme--documenter-dark .buttons.has-addons .button.is-active,html.theme--documenter-dark .buttons.has-addons .button.is-selected{z-index:3}html.theme--documenter-dark .buttons.has-addons .button:focus:hover,html.theme--documenter-dark .buttons.has-addons .button.is-focused:hover,html.theme--documenter-dark .buttons.has-addons .button:active:hover,html.theme--documenter-dark .buttons.has-addons .button.is-active:hover,html.theme--documenter-dark .buttons.has-addons .button.is-selected:hover{z-index:4}html.theme--documenter-dark .buttons.has-addons .button.is-expanded{flex-grow:1;flex-shrink:1}html.theme--documenter-dark .buttons.is-centered{justify-content:center}html.theme--documenter-dark .buttons.is-centered:not(.has-addons) .button:not(.is-fullwidth){margin-left:0.25rem;margin-right:0.25rem}html.theme--documenter-dark .buttons.is-right{justify-content:flex-end}html.theme--documenter-dark .buttons.is-right:not(.has-addons) .button:not(.is-fullwidth){margin-left:0.25rem;margin-right:0.25rem}@media screen and (max-width: 768px){html.theme--documenter-dark .button.is-responsive.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-responsive{font-size:.5625rem}html.theme--documenter-dark .button.is-responsive,html.theme--documenter-dark .button.is-responsive.is-normal{font-size:.65625rem}html.theme--documenter-dark .button.is-responsive.is-medium{font-size:.75rem}html.theme--documenter-dark .button.is-responsive.is-large{font-size:1rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark 
.button.is-responsive.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-responsive{font-size:.65625rem}html.theme--documenter-dark .button.is-responsive,html.theme--documenter-dark .button.is-responsive.is-normal{font-size:.75rem}html.theme--documenter-dark .button.is-responsive.is-medium{font-size:1rem}html.theme--documenter-dark .button.is-responsive.is-large{font-size:1.25rem}}html.theme--documenter-dark .container{flex-grow:1;margin:0 auto;position:relative;width:auto}html.theme--documenter-dark .container.is-fluid{max-width:none !important;padding-left:32px;padding-right:32px;width:100%}@media screen and (min-width: 1056px){html.theme--documenter-dark .container{max-width:992px}}@media screen and (max-width: 1215px){html.theme--documenter-dark .container.is-widescreen:not(.is-max-desktop){max-width:1152px}}@media screen and (max-width: 1407px){html.theme--documenter-dark .container.is-fullhd:not(.is-max-desktop):not(.is-max-widescreen){max-width:1344px}}@media screen and (min-width: 1216px){html.theme--documenter-dark .container:not(.is-max-desktop){max-width:1152px}}@media screen and (min-width: 1408px){html.theme--documenter-dark .container:not(.is-max-desktop):not(.is-max-widescreen){max-width:1344px}}html.theme--documenter-dark .content li+li{margin-top:0.25em}html.theme--documenter-dark .content p:not(:last-child),html.theme--documenter-dark .content dl:not(:last-child),html.theme--documenter-dark .content ol:not(:last-child),html.theme--documenter-dark .content ul:not(:last-child),html.theme--documenter-dark .content blockquote:not(:last-child),html.theme--documenter-dark .content pre:not(:last-child),html.theme--documenter-dark .content table:not(:last-child){margin-bottom:1em}html.theme--documenter-dark .content h1,html.theme--documenter-dark .content h2,html.theme--documenter-dark .content h3,html.theme--documenter-dark .content h4,html.theme--documenter-dark .content h5,html.theme--documenter-dark .content h6{color:#f2f2f2;font-weight:600;line-height:1.125}html.theme--documenter-dark .content h1{font-size:2em;margin-bottom:0.5em}html.theme--documenter-dark .content h1:not(:first-child){margin-top:1em}html.theme--documenter-dark .content h2{font-size:1.75em;margin-bottom:0.5714em}html.theme--documenter-dark .content h2:not(:first-child){margin-top:1.1428em}html.theme--documenter-dark .content h3{font-size:1.5em;margin-bottom:0.6666em}html.theme--documenter-dark .content h3:not(:first-child){margin-top:1.3333em}html.theme--documenter-dark .content h4{font-size:1.25em;margin-bottom:0.8em}html.theme--documenter-dark .content h5{font-size:1.125em;margin-bottom:0.8888em}html.theme--documenter-dark .content h6{font-size:1em;margin-bottom:1em}html.theme--documenter-dark .content blockquote{background-color:#282f2f;border-left:5px solid #5e6d6f;padding:1.25em 1.5em}html.theme--documenter-dark .content ol{list-style-position:outside;margin-left:2em;margin-top:1em}html.theme--documenter-dark .content ol:not([type]){list-style-type:decimal}html.theme--documenter-dark .content ol.is-lower-alpha:not([type]){list-style-type:lower-alpha}html.theme--documenter-dark .content ol.is-lower-roman:not([type]){list-style-type:lower-roman}html.theme--documenter-dark .content ol.is-upper-alpha:not([type]){list-style-type:upper-alpha}html.theme--documenter-dark .content ol.is-upper-roman:not([type]){list-style-type:upper-roman}html.theme--documenter-dark .content ul{list-style:disc outside;margin-left:2em;margin-top:1em}html.theme--documenter-dark .content ul 
ul{list-style-type:circle;margin-top:0.5em}html.theme--documenter-dark .content ul ul ul{list-style-type:square}html.theme--documenter-dark .content dd{margin-left:2em}html.theme--documenter-dark .content figure{margin-left:2em;margin-right:2em;text-align:center}html.theme--documenter-dark .content figure:not(:first-child){margin-top:2em}html.theme--documenter-dark .content figure:not(:last-child){margin-bottom:2em}html.theme--documenter-dark .content figure img{display:inline-block}html.theme--documenter-dark .content figure figcaption{font-style:italic}html.theme--documenter-dark .content pre{-webkit-overflow-scrolling:touch;overflow-x:auto;padding:0;white-space:pre;word-wrap:normal}html.theme--documenter-dark .content sup,html.theme--documenter-dark .content sub{font-size:75%}html.theme--documenter-dark .content table{width:100%}html.theme--documenter-dark .content table td,html.theme--documenter-dark .content table th{border:1px solid #5e6d6f;border-width:0 0 1px;padding:0.5em 0.75em;vertical-align:top}html.theme--documenter-dark .content table th{color:#f2f2f2}html.theme--documenter-dark .content table th:not([align]){text-align:inherit}html.theme--documenter-dark .content table thead td,html.theme--documenter-dark .content table thead th{border-width:0 0 2px;color:#f2f2f2}html.theme--documenter-dark .content table tfoot td,html.theme--documenter-dark .content table tfoot th{border-width:2px 0 0;color:#f2f2f2}html.theme--documenter-dark .content table tbody tr:last-child td,html.theme--documenter-dark .content table tbody tr:last-child th{border-bottom-width:0}html.theme--documenter-dark .content .tabs li+li{margin-top:0}html.theme--documenter-dark .content.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.content{font-size:.75rem}html.theme--documenter-dark .content.is-normal{font-size:1rem}html.theme--documenter-dark .content.is-medium{font-size:1.25rem}html.theme--documenter-dark .content.is-large{font-size:1.5rem}html.theme--documenter-dark .icon{align-items:center;display:inline-flex;justify-content:center;height:1.5rem;width:1.5rem}html.theme--documenter-dark .icon.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.icon{height:1rem;width:1rem}html.theme--documenter-dark .icon.is-medium{height:2rem;width:2rem}html.theme--documenter-dark .icon.is-large{height:3rem;width:3rem}html.theme--documenter-dark .icon-text{align-items:flex-start;color:inherit;display:inline-flex;flex-wrap:wrap;line-height:1.5rem;vertical-align:top}html.theme--documenter-dark .icon-text .icon{flex-grow:0;flex-shrink:0}html.theme--documenter-dark .icon-text .icon:not(:last-child){margin-right:.25em}html.theme--documenter-dark .icon-text .icon:not(:first-child){margin-left:.25em}html.theme--documenter-dark div.icon-text{display:flex}html.theme--documenter-dark .image,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img{display:block;position:relative}html.theme--documenter-dark .image img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img img{display:block;height:auto;width:100%}html.theme--documenter-dark .image img.is-rounded,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img img.is-rounded{border-radius:9999px}html.theme--documenter-dark .image.is-fullwidth,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-fullwidth{width:100%}html.theme--documenter-dark .image.is-square img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-square 
img,html.theme--documenter-dark .image.is-square .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-square .has-ratio,html.theme--documenter-dark .image.is-1by1 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by1 img,html.theme--documenter-dark .image.is-1by1 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by1 .has-ratio,html.theme--documenter-dark .image.is-5by4 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by4 img,html.theme--documenter-dark .image.is-5by4 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by4 .has-ratio,html.theme--documenter-dark .image.is-4by3 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by3 img,html.theme--documenter-dark .image.is-4by3 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by3 .has-ratio,html.theme--documenter-dark .image.is-3by2 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by2 img,html.theme--documenter-dark .image.is-3by2 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by2 .has-ratio,html.theme--documenter-dark .image.is-5by3 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by3 img,html.theme--documenter-dark .image.is-5by3 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by3 .has-ratio,html.theme--documenter-dark .image.is-16by9 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-16by9 img,html.theme--documenter-dark .image.is-16by9 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-16by9 .has-ratio,html.theme--documenter-dark .image.is-2by1 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by1 img,html.theme--documenter-dark .image.is-2by1 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by1 .has-ratio,html.theme--documenter-dark .image.is-3by1 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by1 img,html.theme--documenter-dark .image.is-3by1 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by1 .has-ratio,html.theme--documenter-dark .image.is-4by5 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by5 img,html.theme--documenter-dark .image.is-4by5 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by5 .has-ratio,html.theme--documenter-dark .image.is-3by4 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by4 img,html.theme--documenter-dark .image.is-3by4 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by4 .has-ratio,html.theme--documenter-dark .image.is-2by3 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by3 img,html.theme--documenter-dark .image.is-2by3 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by3 .has-ratio,html.theme--documenter-dark .image.is-3by5 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by5 img,html.theme--documenter-dark .image.is-3by5 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by5 .has-ratio,html.theme--documenter-dark .image.is-9by16 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-9by16 img,html.theme--documenter-dark 
.image.is-9by16 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-9by16 .has-ratio,html.theme--documenter-dark .image.is-1by2 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by2 img,html.theme--documenter-dark .image.is-1by2 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by2 .has-ratio,html.theme--documenter-dark .image.is-1by3 img,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by3 img,html.theme--documenter-dark .image.is-1by3 .has-ratio,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by3 .has-ratio{height:100%;width:100%}html.theme--documenter-dark .image.is-square,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-square,html.theme--documenter-dark .image.is-1by1,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by1{padding-top:100%}html.theme--documenter-dark .image.is-5by4,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by4{padding-top:80%}html.theme--documenter-dark .image.is-4by3,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by3{padding-top:75%}html.theme--documenter-dark .image.is-3by2,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by2{padding-top:66.6666%}html.theme--documenter-dark .image.is-5by3,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-5by3{padding-top:60%}html.theme--documenter-dark .image.is-16by9,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-16by9{padding-top:56.25%}html.theme--documenter-dark .image.is-2by1,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by1{padding-top:50%}html.theme--documenter-dark .image.is-3by1,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by1{padding-top:33.3333%}html.theme--documenter-dark .image.is-4by5,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-4by5{padding-top:125%}html.theme--documenter-dark .image.is-3by4,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by4{padding-top:133.3333%}html.theme--documenter-dark .image.is-2by3,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-2by3{padding-top:150%}html.theme--documenter-dark .image.is-3by5,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-3by5{padding-top:166.6666%}html.theme--documenter-dark .image.is-9by16,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-9by16{padding-top:177.7777%}html.theme--documenter-dark .image.is-1by2,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by2{padding-top:200%}html.theme--documenter-dark .image.is-1by3,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-1by3{padding-top:300%}html.theme--documenter-dark .image.is-16x16,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-16x16{height:16px;width:16px}html.theme--documenter-dark .image.is-24x24,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-24x24{height:24px;width:24px}html.theme--documenter-dark .image.is-32x32,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-32x32{height:32px;width:32px}html.theme--documenter-dark .image.is-48x48,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-48x48{height:48px;width:48px}html.theme--documenter-dark .image.is-64x64,html.theme--documenter-dark #documenter 
.docs-sidebar .docs-logo>img.is-64x64{height:64px;width:64px}html.theme--documenter-dark .image.is-96x96,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-96x96{height:96px;width:96px}html.theme--documenter-dark .image.is-128x128,html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img.is-128x128{height:128px;width:128px}html.theme--documenter-dark .notification{background-color:#282f2f;border-radius:.4em;position:relative;padding:1.25rem 2.5rem 1.25rem 1.5rem}html.theme--documenter-dark .notification a:not(.button):not(.dropdown-item){color:currentColor;text-decoration:underline}html.theme--documenter-dark .notification strong{color:currentColor}html.theme--documenter-dark .notification code,html.theme--documenter-dark .notification pre{background:#fff}html.theme--documenter-dark .notification pre code{background:transparent}html.theme--documenter-dark .notification>.delete{right:.5rem;position:absolute;top:0.5rem}html.theme--documenter-dark .notification .title,html.theme--documenter-dark .notification .subtitle,html.theme--documenter-dark .notification .content{color:currentColor}html.theme--documenter-dark .notification.is-white{background-color:#fff;color:#0a0a0a}html.theme--documenter-dark .notification.is-black{background-color:#0a0a0a;color:#fff}html.theme--documenter-dark .notification.is-light{background-color:#ecf0f1;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .notification.is-dark,html.theme--documenter-dark .content kbd.notification{background-color:#282f2f;color:#fff}html.theme--documenter-dark .notification.is-primary,html.theme--documenter-dark .docstring>section>a.notification.docs-sourcelink{background-color:#375a7f;color:#fff}html.theme--documenter-dark .notification.is-primary.is-light,html.theme--documenter-dark .docstring>section>a.notification.is-light.docs-sourcelink{background-color:#f1f5f9;color:#4d7eb2}html.theme--documenter-dark .notification.is-link{background-color:#1abc9c;color:#fff}html.theme--documenter-dark .notification.is-link.is-light{background-color:#edfdf9;color:#15987e}html.theme--documenter-dark .notification.is-info{background-color:#024c7d;color:#fff}html.theme--documenter-dark .notification.is-info.is-light{background-color:#ebf7ff;color:#0e9dfb}html.theme--documenter-dark .notification.is-success{background-color:#008438;color:#fff}html.theme--documenter-dark .notification.is-success.is-light{background-color:#ebfff3;color:#00eb64}html.theme--documenter-dark .notification.is-warning{background-color:#ad8100;color:#fff}html.theme--documenter-dark .notification.is-warning.is-light{background-color:#fffaeb;color:#d19c00}html.theme--documenter-dark .notification.is-danger{background-color:#9e1b0d;color:#fff}html.theme--documenter-dark .notification.is-danger.is-light{background-color:#fdeeec;color:#ec311d}html.theme--documenter-dark .progress{-moz-appearance:none;-webkit-appearance:none;border:none;border-radius:9999px;display:block;height:1rem;overflow:hidden;padding:0;width:100%}html.theme--documenter-dark .progress::-webkit-progress-bar{background-color:#343c3d}html.theme--documenter-dark .progress::-webkit-progress-value{background-color:#dbdee0}html.theme--documenter-dark .progress::-moz-progress-bar{background-color:#dbdee0}html.theme--documenter-dark .progress::-ms-fill{background-color:#dbdee0;border:none}html.theme--documenter-dark .progress.is-white::-webkit-progress-value{background-color:#fff}html.theme--documenter-dark 
.progress.is-white::-moz-progress-bar{background-color:#fff}html.theme--documenter-dark .progress.is-white::-ms-fill{background-color:#fff}html.theme--documenter-dark .progress.is-white:indeterminate{background-image:linear-gradient(to right, #fff 30%, #343c3d 30%)}html.theme--documenter-dark .progress.is-black::-webkit-progress-value{background-color:#0a0a0a}html.theme--documenter-dark .progress.is-black::-moz-progress-bar{background-color:#0a0a0a}html.theme--documenter-dark .progress.is-black::-ms-fill{background-color:#0a0a0a}html.theme--documenter-dark .progress.is-black:indeterminate{background-image:linear-gradient(to right, #0a0a0a 30%, #343c3d 30%)}html.theme--documenter-dark .progress.is-light::-webkit-progress-value{background-color:#ecf0f1}html.theme--documenter-dark .progress.is-light::-moz-progress-bar{background-color:#ecf0f1}html.theme--documenter-dark .progress.is-light::-ms-fill{background-color:#ecf0f1}html.theme--documenter-dark .progress.is-light:indeterminate{background-image:linear-gradient(to right, #ecf0f1 30%, #343c3d 30%)}html.theme--documenter-dark .progress.is-dark::-webkit-progress-value,html.theme--documenter-dark .content kbd.progress::-webkit-progress-value{background-color:#282f2f}html.theme--documenter-dark .progress.is-dark::-moz-progress-bar,html.theme--documenter-dark .content kbd.progress::-moz-progress-bar{background-color:#282f2f}html.theme--documenter-dark .progress.is-dark::-ms-fill,html.theme--documenter-dark .content kbd.progress::-ms-fill{background-color:#282f2f}html.theme--documenter-dark .progress.is-dark:indeterminate,html.theme--documenter-dark .content kbd.progress:indeterminate{background-image:linear-gradient(to right, #282f2f 30%, #343c3d 30%)}html.theme--documenter-dark .progress.is-primary::-webkit-progress-value,html.theme--documenter-dark .docstring>section>a.progress.docs-sourcelink::-webkit-progress-value{background-color:#375a7f}html.theme--documenter-dark .progress.is-primary::-moz-progress-bar,html.theme--documenter-dark .docstring>section>a.progress.docs-sourcelink::-moz-progress-bar{background-color:#375a7f}html.theme--documenter-dark .progress.is-primary::-ms-fill,html.theme--documenter-dark .docstring>section>a.progress.docs-sourcelink::-ms-fill{background-color:#375a7f}html.theme--documenter-dark .progress.is-primary:indeterminate,html.theme--documenter-dark .docstring>section>a.progress.docs-sourcelink:indeterminate{background-image:linear-gradient(to right, #375a7f 30%, #343c3d 30%)}html.theme--documenter-dark .progress.is-link::-webkit-progress-value{background-color:#1abc9c}html.theme--documenter-dark .progress.is-link::-moz-progress-bar{background-color:#1abc9c}html.theme--documenter-dark .progress.is-link::-ms-fill{background-color:#1abc9c}html.theme--documenter-dark .progress.is-link:indeterminate{background-image:linear-gradient(to right, #1abc9c 30%, #343c3d 30%)}html.theme--documenter-dark .progress.is-info::-webkit-progress-value{background-color:#024c7d}html.theme--documenter-dark .progress.is-info::-moz-progress-bar{background-color:#024c7d}html.theme--documenter-dark .progress.is-info::-ms-fill{background-color:#024c7d}html.theme--documenter-dark .progress.is-info:indeterminate{background-image:linear-gradient(to right, #024c7d 30%, #343c3d 30%)}html.theme--documenter-dark .progress.is-success::-webkit-progress-value{background-color:#008438}html.theme--documenter-dark .progress.is-success::-moz-progress-bar{background-color:#008438}html.theme--documenter-dark 
.progress.is-success::-ms-fill{background-color:#008438}html.theme--documenter-dark .progress.is-success:indeterminate{background-image:linear-gradient(to right, #008438 30%, #343c3d 30%)}html.theme--documenter-dark .progress.is-warning::-webkit-progress-value{background-color:#ad8100}html.theme--documenter-dark .progress.is-warning::-moz-progress-bar{background-color:#ad8100}html.theme--documenter-dark .progress.is-warning::-ms-fill{background-color:#ad8100}html.theme--documenter-dark .progress.is-warning:indeterminate{background-image:linear-gradient(to right, #ad8100 30%, #343c3d 30%)}html.theme--documenter-dark .progress.is-danger::-webkit-progress-value{background-color:#9e1b0d}html.theme--documenter-dark .progress.is-danger::-moz-progress-bar{background-color:#9e1b0d}html.theme--documenter-dark .progress.is-danger::-ms-fill{background-color:#9e1b0d}html.theme--documenter-dark .progress.is-danger:indeterminate{background-image:linear-gradient(to right, #9e1b0d 30%, #343c3d 30%)}html.theme--documenter-dark .progress:indeterminate{animation-duration:1.5s;animation-iteration-count:infinite;animation-name:moveIndeterminate;animation-timing-function:linear;background-color:#343c3d;background-image:linear-gradient(to right, #fff 30%, #343c3d 30%);background-position:top left;background-repeat:no-repeat;background-size:150% 150%}html.theme--documenter-dark .progress:indeterminate::-webkit-progress-bar{background-color:transparent}html.theme--documenter-dark .progress:indeterminate::-moz-progress-bar{background-color:transparent}html.theme--documenter-dark .progress:indeterminate::-ms-fill{animation-name:none}html.theme--documenter-dark .progress.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.progress{height:.75rem}html.theme--documenter-dark .progress.is-medium{height:1.25rem}html.theme--documenter-dark .progress.is-large{height:1.5rem}@keyframes moveIndeterminate{from{background-position:200% 0}to{background-position:-200% 0}}html.theme--documenter-dark .table{background-color:#343c3d;color:#fff}html.theme--documenter-dark .table td,html.theme--documenter-dark .table th{border:1px solid #5e6d6f;border-width:0 0 1px;padding:0.5em 0.75em;vertical-align:top}html.theme--documenter-dark .table td.is-white,html.theme--documenter-dark .table th.is-white{background-color:#fff;border-color:#fff;color:#0a0a0a}html.theme--documenter-dark .table td.is-black,html.theme--documenter-dark .table th.is-black{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}html.theme--documenter-dark .table td.is-light,html.theme--documenter-dark .table th.is-light{background-color:#ecf0f1;border-color:#ecf0f1;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .table td.is-dark,html.theme--documenter-dark .table th.is-dark{background-color:#282f2f;border-color:#282f2f;color:#fff}html.theme--documenter-dark .table td.is-primary,html.theme--documenter-dark .table th.is-primary{background-color:#375a7f;border-color:#375a7f;color:#fff}html.theme--documenter-dark .table td.is-link,html.theme--documenter-dark .table th.is-link{background-color:#1abc9c;border-color:#1abc9c;color:#fff}html.theme--documenter-dark .table td.is-info,html.theme--documenter-dark .table th.is-info{background-color:#024c7d;border-color:#024c7d;color:#fff}html.theme--documenter-dark .table td.is-success,html.theme--documenter-dark .table th.is-success{background-color:#008438;border-color:#008438;color:#fff}html.theme--documenter-dark .table td.is-warning,html.theme--documenter-dark .table 
th.is-warning{background-color:#ad8100;border-color:#ad8100;color:#fff}html.theme--documenter-dark .table td.is-danger,html.theme--documenter-dark .table th.is-danger{background-color:#9e1b0d;border-color:#9e1b0d;color:#fff}html.theme--documenter-dark .table td.is-narrow,html.theme--documenter-dark .table th.is-narrow{white-space:nowrap;width:1%}html.theme--documenter-dark .table td.is-selected,html.theme--documenter-dark .table th.is-selected{background-color:#375a7f;color:#fff}html.theme--documenter-dark .table td.is-selected a,html.theme--documenter-dark .table td.is-selected strong,html.theme--documenter-dark .table th.is-selected a,html.theme--documenter-dark .table th.is-selected strong{color:currentColor}html.theme--documenter-dark .table td.is-vcentered,html.theme--documenter-dark .table th.is-vcentered{vertical-align:middle}html.theme--documenter-dark .table th{color:#f2f2f2}html.theme--documenter-dark .table th:not([align]){text-align:left}html.theme--documenter-dark .table tr.is-selected{background-color:#375a7f;color:#fff}html.theme--documenter-dark .table tr.is-selected a,html.theme--documenter-dark .table tr.is-selected strong{color:currentColor}html.theme--documenter-dark .table tr.is-selected td,html.theme--documenter-dark .table tr.is-selected th{border-color:#fff;color:currentColor}html.theme--documenter-dark .table thead{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .table thead td,html.theme--documenter-dark .table thead th{border-width:0 0 2px;color:#f2f2f2}html.theme--documenter-dark .table tfoot{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .table tfoot td,html.theme--documenter-dark .table tfoot th{border-width:2px 0 0;color:#f2f2f2}html.theme--documenter-dark .table tbody{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .table tbody tr:last-child td,html.theme--documenter-dark .table tbody tr:last-child th{border-bottom-width:0}html.theme--documenter-dark .table.is-bordered td,html.theme--documenter-dark .table.is-bordered th{border-width:1px}html.theme--documenter-dark .table.is-bordered tr:last-child td,html.theme--documenter-dark .table.is-bordered tr:last-child th{border-bottom-width:1px}html.theme--documenter-dark .table.is-fullwidth{width:100%}html.theme--documenter-dark .table.is-hoverable tbody tr:not(.is-selected):hover{background-color:#282f2f}html.theme--documenter-dark .table.is-hoverable.is-striped tbody tr:not(.is-selected):hover{background-color:#282f2f}html.theme--documenter-dark .table.is-hoverable.is-striped tbody tr:not(.is-selected):hover:nth-child(even){background-color:#2d3435}html.theme--documenter-dark .table.is-narrow td,html.theme--documenter-dark .table.is-narrow th{padding:0.25em 0.5em}html.theme--documenter-dark .table.is-striped tbody tr:not(.is-selected):nth-child(even){background-color:#282f2f}html.theme--documenter-dark .table-container{-webkit-overflow-scrolling:touch;overflow:auto;overflow-y:hidden;max-width:100%}html.theme--documenter-dark .tags{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}html.theme--documenter-dark .tags .tag,html.theme--documenter-dark .tags .content kbd,html.theme--documenter-dark .content .tags kbd,html.theme--documenter-dark .tags .docstring>section>a.docs-sourcelink{margin-bottom:0.5rem}html.theme--documenter-dark .tags .tag:not(:last-child),html.theme--documenter-dark .tags .content kbd:not(:last-child),html.theme--documenter-dark .content .tags kbd:not(:last-child),html.theme--documenter-dark .tags 
.docstring>section>a.docs-sourcelink:not(:last-child){margin-right:.5rem}html.theme--documenter-dark .tags:last-child{margin-bottom:-0.5rem}html.theme--documenter-dark .tags:not(:last-child){margin-bottom:1rem}html.theme--documenter-dark .tags.are-medium .tag:not(.is-normal):not(.is-large),html.theme--documenter-dark .tags.are-medium .content kbd:not(.is-normal):not(.is-large),html.theme--documenter-dark .content .tags.are-medium kbd:not(.is-normal):not(.is-large),html.theme--documenter-dark .tags.are-medium .docstring>section>a.docs-sourcelink:not(.is-normal):not(.is-large){font-size:1rem}html.theme--documenter-dark .tags.are-large .tag:not(.is-normal):not(.is-medium),html.theme--documenter-dark .tags.are-large .content kbd:not(.is-normal):not(.is-medium),html.theme--documenter-dark .content .tags.are-large kbd:not(.is-normal):not(.is-medium),html.theme--documenter-dark .tags.are-large .docstring>section>a.docs-sourcelink:not(.is-normal):not(.is-medium){font-size:1.25rem}html.theme--documenter-dark .tags.is-centered{justify-content:center}html.theme--documenter-dark .tags.is-centered .tag,html.theme--documenter-dark .tags.is-centered .content kbd,html.theme--documenter-dark .content .tags.is-centered kbd,html.theme--documenter-dark .tags.is-centered .docstring>section>a.docs-sourcelink{margin-right:0.25rem;margin-left:0.25rem}html.theme--documenter-dark .tags.is-right{justify-content:flex-end}html.theme--documenter-dark .tags.is-right .tag:not(:first-child),html.theme--documenter-dark .tags.is-right .content kbd:not(:first-child),html.theme--documenter-dark .content .tags.is-right kbd:not(:first-child),html.theme--documenter-dark .tags.is-right .docstring>section>a.docs-sourcelink:not(:first-child){margin-left:0.5rem}html.theme--documenter-dark .tags.is-right .tag:not(:last-child),html.theme--documenter-dark .tags.is-right .content kbd:not(:last-child),html.theme--documenter-dark .content .tags.is-right kbd:not(:last-child),html.theme--documenter-dark .tags.is-right .docstring>section>a.docs-sourcelink:not(:last-child){margin-right:0}html.theme--documenter-dark .tags.has-addons .tag,html.theme--documenter-dark .tags.has-addons .content kbd,html.theme--documenter-dark .content .tags.has-addons kbd,html.theme--documenter-dark .tags.has-addons .docstring>section>a.docs-sourcelink{margin-right:0}html.theme--documenter-dark .tags.has-addons .tag:not(:first-child),html.theme--documenter-dark .tags.has-addons .content kbd:not(:first-child),html.theme--documenter-dark .content .tags.has-addons kbd:not(:first-child),html.theme--documenter-dark .tags.has-addons .docstring>section>a.docs-sourcelink:not(:first-child){margin-left:0;border-top-left-radius:0;border-bottom-left-radius:0}html.theme--documenter-dark .tags.has-addons .tag:not(:last-child),html.theme--documenter-dark .tags.has-addons .content kbd:not(:last-child),html.theme--documenter-dark .content .tags.has-addons kbd:not(:last-child),html.theme--documenter-dark .tags.has-addons .docstring>section>a.docs-sourcelink:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}html.theme--documenter-dark .tag:not(body),html.theme--documenter-dark .content kbd:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink:not(body){align-items:center;background-color:#282f2f;border-radius:.4em;color:#fff;display:inline-flex;font-size:.75rem;height:2em;justify-content:center;line-height:1.5;padding-left:0.75em;padding-right:0.75em;white-space:nowrap}html.theme--documenter-dark .tag:not(body) 
.delete,html.theme--documenter-dark .content kbd:not(body) .delete,html.theme--documenter-dark .docstring>section>a.docs-sourcelink:not(body) .delete{margin-left:.25rem;margin-right:-.375rem}html.theme--documenter-dark .tag.is-white:not(body),html.theme--documenter-dark .content kbd.is-white:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-white:not(body){background-color:#fff;color:#0a0a0a}html.theme--documenter-dark .tag.is-black:not(body),html.theme--documenter-dark .content kbd.is-black:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-black:not(body){background-color:#0a0a0a;color:#fff}html.theme--documenter-dark .tag.is-light:not(body),html.theme--documenter-dark .content kbd.is-light:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-light:not(body){background-color:#ecf0f1;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .tag.is-dark:not(body),html.theme--documenter-dark .content kbd:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-dark:not(body),html.theme--documenter-dark .content .docstring>section>kbd:not(body){background-color:#282f2f;color:#fff}html.theme--documenter-dark .tag.is-primary:not(body),html.theme--documenter-dark .content kbd.is-primary:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink:not(body){background-color:#375a7f;color:#fff}html.theme--documenter-dark .tag.is-primary.is-light:not(body),html.theme--documenter-dark .content kbd.is-primary.is-light:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-light:not(body){background-color:#f1f5f9;color:#4d7eb2}html.theme--documenter-dark .tag.is-link:not(body),html.theme--documenter-dark .content kbd.is-link:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-link:not(body){background-color:#1abc9c;color:#fff}html.theme--documenter-dark .tag.is-link.is-light:not(body),html.theme--documenter-dark .content kbd.is-link.is-light:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-link.is-light:not(body){background-color:#edfdf9;color:#15987e}html.theme--documenter-dark .tag.is-info:not(body),html.theme--documenter-dark .content kbd.is-info:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-info:not(body){background-color:#024c7d;color:#fff}html.theme--documenter-dark .tag.is-info.is-light:not(body),html.theme--documenter-dark .content kbd.is-info.is-light:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-info.is-light:not(body){background-color:#ebf7ff;color:#0e9dfb}html.theme--documenter-dark .tag.is-success:not(body),html.theme--documenter-dark .content kbd.is-success:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-success:not(body){background-color:#008438;color:#fff}html.theme--documenter-dark .tag.is-success.is-light:not(body),html.theme--documenter-dark .content kbd.is-success.is-light:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-success.is-light:not(body){background-color:#ebfff3;color:#00eb64}html.theme--documenter-dark .tag.is-warning:not(body),html.theme--documenter-dark .content kbd.is-warning:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-warning:not(body){background-color:#ad8100;color:#fff}html.theme--documenter-dark .tag.is-warning.is-light:not(body),html.theme--documenter-dark .content kbd.is-warning.is-light:not(body),html.theme--documenter-dark 
.docstring>section>a.docs-sourcelink.is-warning.is-light:not(body){background-color:#fffaeb;color:#d19c00}html.theme--documenter-dark .tag.is-danger:not(body),html.theme--documenter-dark .content kbd.is-danger:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-danger:not(body){background-color:#9e1b0d;color:#fff}html.theme--documenter-dark .tag.is-danger.is-light:not(body),html.theme--documenter-dark .content kbd.is-danger.is-light:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-danger.is-light:not(body){background-color:#fdeeec;color:#ec311d}html.theme--documenter-dark .tag.is-normal:not(body),html.theme--documenter-dark .content kbd.is-normal:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-normal:not(body){font-size:.75rem}html.theme--documenter-dark .tag.is-medium:not(body),html.theme--documenter-dark .content kbd.is-medium:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-medium:not(body){font-size:1rem}html.theme--documenter-dark .tag.is-large:not(body),html.theme--documenter-dark .content kbd.is-large:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-large:not(body){font-size:1.25rem}html.theme--documenter-dark .tag:not(body) .icon:first-child:not(:last-child),html.theme--documenter-dark .content kbd:not(body) .icon:first-child:not(:last-child),html.theme--documenter-dark .docstring>section>a.docs-sourcelink:not(body) .icon:first-child:not(:last-child){margin-left:-.375em;margin-right:.1875em}html.theme--documenter-dark .tag:not(body) .icon:last-child:not(:first-child),html.theme--documenter-dark .content kbd:not(body) .icon:last-child:not(:first-child),html.theme--documenter-dark .docstring>section>a.docs-sourcelink:not(body) .icon:last-child:not(:first-child){margin-left:.1875em;margin-right:-.375em}html.theme--documenter-dark .tag:not(body) .icon:first-child:last-child,html.theme--documenter-dark .content kbd:not(body) .icon:first-child:last-child,html.theme--documenter-dark .docstring>section>a.docs-sourcelink:not(body) .icon:first-child:last-child{margin-left:-.375em;margin-right:-.375em}html.theme--documenter-dark .tag.is-delete:not(body),html.theme--documenter-dark .content kbd.is-delete:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-delete:not(body){margin-left:1px;padding:0;position:relative;width:2em}html.theme--documenter-dark .tag.is-delete:not(body)::before,html.theme--documenter-dark .content kbd.is-delete:not(body)::before,html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-delete:not(body)::before,html.theme--documenter-dark .tag.is-delete:not(body)::after,html.theme--documenter-dark .content kbd.is-delete:not(body)::after,html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-delete:not(body)::after{background-color:currentColor;content:"";display:block;left:50%;position:absolute;top:50%;transform:translateX(-50%) translateY(-50%) rotate(45deg);transform-origin:center center}html.theme--documenter-dark .tag.is-delete:not(body)::before,html.theme--documenter-dark .content kbd.is-delete:not(body)::before,html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-delete:not(body)::before{height:1px;width:50%}html.theme--documenter-dark .tag.is-delete:not(body)::after,html.theme--documenter-dark .content kbd.is-delete:not(body)::after,html.theme--documenter-dark 
.docstring>section>a.docs-sourcelink.is-delete:not(body)::after{height:50%;width:1px}html.theme--documenter-dark .tag.is-delete:not(body):hover,html.theme--documenter-dark .content kbd.is-delete:not(body):hover,html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-delete:not(body):hover,html.theme--documenter-dark .tag.is-delete:not(body):focus,html.theme--documenter-dark .content kbd.is-delete:not(body):focus,html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-delete:not(body):focus{background-color:#1d2122}html.theme--documenter-dark .tag.is-delete:not(body):active,html.theme--documenter-dark .content kbd.is-delete:not(body):active,html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-delete:not(body):active{background-color:#111414}html.theme--documenter-dark .tag.is-rounded:not(body),html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:not(body),html.theme--documenter-dark .content kbd.is-rounded:not(body),html.theme--documenter-dark #documenter .docs-sidebar .content form.docs-search>input:not(body),html.theme--documenter-dark .docstring>section>a.docs-sourcelink.is-rounded:not(body){border-radius:9999px}html.theme--documenter-dark a.tag:hover,html.theme--documenter-dark .docstring>section>a.docs-sourcelink:hover{text-decoration:underline}html.theme--documenter-dark .title,html.theme--documenter-dark .subtitle{word-break:break-word}html.theme--documenter-dark .title em,html.theme--documenter-dark .title span,html.theme--documenter-dark .subtitle em,html.theme--documenter-dark .subtitle span{font-weight:inherit}html.theme--documenter-dark .title sub,html.theme--documenter-dark .subtitle sub{font-size:.75em}html.theme--documenter-dark .title sup,html.theme--documenter-dark .subtitle sup{font-size:.75em}html.theme--documenter-dark .title .tag,html.theme--documenter-dark .title .content kbd,html.theme--documenter-dark .content .title kbd,html.theme--documenter-dark .title .docstring>section>a.docs-sourcelink,html.theme--documenter-dark .subtitle .tag,html.theme--documenter-dark .subtitle .content kbd,html.theme--documenter-dark .content .subtitle kbd,html.theme--documenter-dark .subtitle .docstring>section>a.docs-sourcelink{vertical-align:middle}html.theme--documenter-dark .title{color:#fff;font-size:2rem;font-weight:500;line-height:1.125}html.theme--documenter-dark .title strong{color:inherit;font-weight:inherit}html.theme--documenter-dark .title:not(.is-spaced)+.subtitle{margin-top:-1.25rem}html.theme--documenter-dark .title.is-1{font-size:3rem}html.theme--documenter-dark .title.is-2{font-size:2.5rem}html.theme--documenter-dark .title.is-3{font-size:2rem}html.theme--documenter-dark .title.is-4{font-size:1.5rem}html.theme--documenter-dark .title.is-5{font-size:1.25rem}html.theme--documenter-dark .title.is-6{font-size:1rem}html.theme--documenter-dark .title.is-7{font-size:.75rem}html.theme--documenter-dark .subtitle{color:#8c9b9d;font-size:1.25rem;font-weight:400;line-height:1.25}html.theme--documenter-dark .subtitle strong{color:#8c9b9d;font-weight:600}html.theme--documenter-dark .subtitle:not(.is-spaced)+.title{margin-top:-1.25rem}html.theme--documenter-dark .subtitle.is-1{font-size:3rem}html.theme--documenter-dark .subtitle.is-2{font-size:2.5rem}html.theme--documenter-dark .subtitle.is-3{font-size:2rem}html.theme--documenter-dark .subtitle.is-4{font-size:1.5rem}html.theme--documenter-dark .subtitle.is-5{font-size:1.25rem}html.theme--documenter-dark .subtitle.is-6{font-size:1rem}html.theme--documenter-dark 
.subtitle.is-7{font-size:.75rem}html.theme--documenter-dark .heading{display:block;font-size:11px;letter-spacing:1px;margin-bottom:5px;text-transform:uppercase}html.theme--documenter-dark .number{align-items:center;background-color:#282f2f;border-radius:9999px;display:inline-flex;font-size:1.25rem;height:2em;justify-content:center;margin-right:1.5rem;min-width:2.5em;padding:0.25rem 0.5rem;text-align:center;vertical-align:top}html.theme--documenter-dark .select select,html.theme--documenter-dark .textarea,html.theme--documenter-dark .input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input{background-color:#1f2424;border-color:#5e6d6f;border-radius:.4em;color:#dbdee0}html.theme--documenter-dark .select select::-moz-placeholder,html.theme--documenter-dark .textarea::-moz-placeholder,html.theme--documenter-dark .input::-moz-placeholder,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input::-moz-placeholder{color:#868c98}html.theme--documenter-dark .select select::-webkit-input-placeholder,html.theme--documenter-dark .textarea::-webkit-input-placeholder,html.theme--documenter-dark .input::-webkit-input-placeholder,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input::-webkit-input-placeholder{color:#868c98}html.theme--documenter-dark .select select:-moz-placeholder,html.theme--documenter-dark .textarea:-moz-placeholder,html.theme--documenter-dark .input:-moz-placeholder,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:-moz-placeholder{color:#868c98}html.theme--documenter-dark .select select:-ms-input-placeholder,html.theme--documenter-dark .textarea:-ms-input-placeholder,html.theme--documenter-dark .input:-ms-input-placeholder,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:-ms-input-placeholder{color:#868c98}html.theme--documenter-dark .select select:hover,html.theme--documenter-dark .textarea:hover,html.theme--documenter-dark .input:hover,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:hover,html.theme--documenter-dark .select select.is-hovered,html.theme--documenter-dark .is-hovered.textarea,html.theme--documenter-dark .is-hovered.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-hovered{border-color:#8c9b9d}html.theme--documenter-dark .select select:focus,html.theme--documenter-dark .textarea:focus,html.theme--documenter-dark .input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:focus,html.theme--documenter-dark .select select.is-focused,html.theme--documenter-dark .is-focused.textarea,html.theme--documenter-dark .is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .select select:active,html.theme--documenter-dark .textarea:active,html.theme--documenter-dark .input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:active,html.theme--documenter-dark .select select.is-active,html.theme--documenter-dark .is-active.textarea,html.theme--documenter-dark .is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active{border-color:#1abc9c;box-shadow:0 0 0 0.125em rgba(26,188,156,0.25)}html.theme--documenter-dark .select select[disabled],html.theme--documenter-dark .textarea[disabled],html.theme--documenter-dark .input[disabled],html.theme--documenter-dark #documenter .docs-sidebar 
form.docs-search>input[disabled],fieldset[disabled] html.theme--documenter-dark .select select,fieldset[disabled] html.theme--documenter-dark .textarea,fieldset[disabled] html.theme--documenter-dark .input,fieldset[disabled] html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input{background-color:#8c9b9d;border-color:#282f2f;box-shadow:none;color:#fff}html.theme--documenter-dark .select select[disabled]::-moz-placeholder,html.theme--documenter-dark .textarea[disabled]::-moz-placeholder,html.theme--documenter-dark .input[disabled]::-moz-placeholder,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input[disabled]::-moz-placeholder,fieldset[disabled] html.theme--documenter-dark .select select::-moz-placeholder,fieldset[disabled] html.theme--documenter-dark .textarea::-moz-placeholder,fieldset[disabled] html.theme--documenter-dark .input::-moz-placeholder,fieldset[disabled] html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input::-moz-placeholder{color:rgba(255,255,255,0.3)}html.theme--documenter-dark .select select[disabled]::-webkit-input-placeholder,html.theme--documenter-dark .textarea[disabled]::-webkit-input-placeholder,html.theme--documenter-dark .input[disabled]::-webkit-input-placeholder,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input[disabled]::-webkit-input-placeholder,fieldset[disabled] html.theme--documenter-dark .select select::-webkit-input-placeholder,fieldset[disabled] html.theme--documenter-dark .textarea::-webkit-input-placeholder,fieldset[disabled] html.theme--documenter-dark .input::-webkit-input-placeholder,fieldset[disabled] html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input::-webkit-input-placeholder{color:rgba(255,255,255,0.3)}html.theme--documenter-dark .select select[disabled]:-moz-placeholder,html.theme--documenter-dark .textarea[disabled]:-moz-placeholder,html.theme--documenter-dark .input[disabled]:-moz-placeholder,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input[disabled]:-moz-placeholder,fieldset[disabled] html.theme--documenter-dark .select select:-moz-placeholder,fieldset[disabled] html.theme--documenter-dark .textarea:-moz-placeholder,fieldset[disabled] html.theme--documenter-dark .input:-moz-placeholder,fieldset[disabled] html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:-moz-placeholder{color:rgba(255,255,255,0.3)}html.theme--documenter-dark .select select[disabled]:-ms-input-placeholder,html.theme--documenter-dark .textarea[disabled]:-ms-input-placeholder,html.theme--documenter-dark .input[disabled]:-ms-input-placeholder,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input[disabled]:-ms-input-placeholder,fieldset[disabled] html.theme--documenter-dark .select select:-ms-input-placeholder,fieldset[disabled] html.theme--documenter-dark .textarea:-ms-input-placeholder,fieldset[disabled] html.theme--documenter-dark .input:-ms-input-placeholder,fieldset[disabled] html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input:-ms-input-placeholder{color:rgba(255,255,255,0.3)}html.theme--documenter-dark .textarea,html.theme--documenter-dark .input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input{box-shadow:inset 0 0.0625em 0.125em rgba(10,10,10,0.05);max-width:100%;width:100%}html.theme--documenter-dark .textarea[readonly],html.theme--documenter-dark .input[readonly],html.theme--documenter-dark #documenter .docs-sidebar 
form.docs-search>input[readonly]{box-shadow:none}html.theme--documenter-dark .is-white.textarea,html.theme--documenter-dark .is-white.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-white{border-color:#fff}html.theme--documenter-dark .is-white.textarea:focus,html.theme--documenter-dark .is-white.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-white:focus,html.theme--documenter-dark .is-white.is-focused.textarea,html.theme--documenter-dark .is-white.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .is-white.textarea:active,html.theme--documenter-dark .is-white.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-white:active,html.theme--documenter-dark .is-white.is-active.textarea,html.theme--documenter-dark .is-white.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(255,255,255,0.25)}html.theme--documenter-dark .is-black.textarea,html.theme--documenter-dark .is-black.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-black{border-color:#0a0a0a}html.theme--documenter-dark .is-black.textarea:focus,html.theme--documenter-dark .is-black.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-black:focus,html.theme--documenter-dark .is-black.is-focused.textarea,html.theme--documenter-dark .is-black.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .is-black.textarea:active,html.theme--documenter-dark .is-black.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-black:active,html.theme--documenter-dark .is-black.is-active.textarea,html.theme--documenter-dark .is-black.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(10,10,10,0.25)}html.theme--documenter-dark .is-light.textarea,html.theme--documenter-dark .is-light.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-light{border-color:#ecf0f1}html.theme--documenter-dark .is-light.textarea:focus,html.theme--documenter-dark .is-light.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-light:focus,html.theme--documenter-dark .is-light.is-focused.textarea,html.theme--documenter-dark .is-light.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .is-light.textarea:active,html.theme--documenter-dark .is-light.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-light:active,html.theme--documenter-dark .is-light.is-active.textarea,html.theme--documenter-dark .is-light.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(236,240,241,0.25)}html.theme--documenter-dark .is-dark.textarea,html.theme--documenter-dark .content kbd.textarea,html.theme--documenter-dark .is-dark.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-dark,html.theme--documenter-dark .content kbd.input{border-color:#282f2f}html.theme--documenter-dark .is-dark.textarea:focus,html.theme--documenter-dark .content 
kbd.textarea:focus,html.theme--documenter-dark .is-dark.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-dark:focus,html.theme--documenter-dark .content kbd.input:focus,html.theme--documenter-dark .is-dark.is-focused.textarea,html.theme--documenter-dark .content kbd.is-focused.textarea,html.theme--documenter-dark .is-dark.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .content kbd.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar .content form.docs-search>input.is-focused,html.theme--documenter-dark .is-dark.textarea:active,html.theme--documenter-dark .content kbd.textarea:active,html.theme--documenter-dark .is-dark.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-dark:active,html.theme--documenter-dark .content kbd.input:active,html.theme--documenter-dark .is-dark.is-active.textarea,html.theme--documenter-dark .content kbd.is-active.textarea,html.theme--documenter-dark .is-dark.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active,html.theme--documenter-dark .content kbd.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar .content form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(40,47,47,0.25)}html.theme--documenter-dark .is-primary.textarea,html.theme--documenter-dark .docstring>section>a.textarea.docs-sourcelink,html.theme--documenter-dark .is-primary.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-primary,html.theme--documenter-dark .docstring>section>a.input.docs-sourcelink{border-color:#375a7f}html.theme--documenter-dark .is-primary.textarea:focus,html.theme--documenter-dark .docstring>section>a.textarea.docs-sourcelink:focus,html.theme--documenter-dark .is-primary.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-primary:focus,html.theme--documenter-dark .docstring>section>a.input.docs-sourcelink:focus,html.theme--documenter-dark .is-primary.is-focused.textarea,html.theme--documenter-dark .docstring>section>a.is-focused.textarea.docs-sourcelink,html.theme--documenter-dark .is-primary.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .docstring>section>a.is-focused.input.docs-sourcelink,html.theme--documenter-dark .is-primary.textarea:active,html.theme--documenter-dark .docstring>section>a.textarea.docs-sourcelink:active,html.theme--documenter-dark .is-primary.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-primary:active,html.theme--documenter-dark .docstring>section>a.input.docs-sourcelink:active,html.theme--documenter-dark .is-primary.is-active.textarea,html.theme--documenter-dark .docstring>section>a.is-active.textarea.docs-sourcelink,html.theme--documenter-dark .is-primary.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active,html.theme--documenter-dark .docstring>section>a.is-active.input.docs-sourcelink{box-shadow:0 0 0 0.125em rgba(55,90,127,0.25)}html.theme--documenter-dark .is-link.textarea,html.theme--documenter-dark .is-link.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-link{border-color:#1abc9c}html.theme--documenter-dark .is-link.textarea:focus,html.theme--documenter-dark 
.is-link.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-link:focus,html.theme--documenter-dark .is-link.is-focused.textarea,html.theme--documenter-dark .is-link.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .is-link.textarea:active,html.theme--documenter-dark .is-link.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-link:active,html.theme--documenter-dark .is-link.is-active.textarea,html.theme--documenter-dark .is-link.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(26,188,156,0.25)}html.theme--documenter-dark .is-info.textarea,html.theme--documenter-dark .is-info.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-info{border-color:#024c7d}html.theme--documenter-dark .is-info.textarea:focus,html.theme--documenter-dark .is-info.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-info:focus,html.theme--documenter-dark .is-info.is-focused.textarea,html.theme--documenter-dark .is-info.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .is-info.textarea:active,html.theme--documenter-dark .is-info.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-info:active,html.theme--documenter-dark .is-info.is-active.textarea,html.theme--documenter-dark .is-info.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(2,76,125,0.25)}html.theme--documenter-dark .is-success.textarea,html.theme--documenter-dark .is-success.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-success{border-color:#008438}html.theme--documenter-dark .is-success.textarea:focus,html.theme--documenter-dark .is-success.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-success:focus,html.theme--documenter-dark .is-success.is-focused.textarea,html.theme--documenter-dark .is-success.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .is-success.textarea:active,html.theme--documenter-dark .is-success.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-success:active,html.theme--documenter-dark .is-success.is-active.textarea,html.theme--documenter-dark .is-success.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(0,132,56,0.25)}html.theme--documenter-dark .is-warning.textarea,html.theme--documenter-dark .is-warning.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-warning{border-color:#ad8100}html.theme--documenter-dark .is-warning.textarea:focus,html.theme--documenter-dark .is-warning.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-warning:focus,html.theme--documenter-dark .is-warning.is-focused.textarea,html.theme--documenter-dark .is-warning.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .is-warning.textarea:active,html.theme--documenter-dark 
.is-warning.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-warning:active,html.theme--documenter-dark .is-warning.is-active.textarea,html.theme--documenter-dark .is-warning.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(173,129,0,0.25)}html.theme--documenter-dark .is-danger.textarea,html.theme--documenter-dark .is-danger.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-danger{border-color:#9e1b0d}html.theme--documenter-dark .is-danger.textarea:focus,html.theme--documenter-dark .is-danger.input:focus,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-danger:focus,html.theme--documenter-dark .is-danger.is-focused.textarea,html.theme--documenter-dark .is-danger.is-focused.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-focused,html.theme--documenter-dark .is-danger.textarea:active,html.theme--documenter-dark .is-danger.input:active,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-danger:active,html.theme--documenter-dark .is-danger.is-active.textarea,html.theme--documenter-dark .is-danger.is-active.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(158,27,13,0.25)}html.theme--documenter-dark .is-small.textarea,html.theme--documenter-dark .is-small.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input{border-radius:3px;font-size:.75rem}html.theme--documenter-dark .is-medium.textarea,html.theme--documenter-dark .is-medium.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-medium{font-size:1.25rem}html.theme--documenter-dark .is-large.textarea,html.theme--documenter-dark .is-large.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-large{font-size:1.5rem}html.theme--documenter-dark .is-fullwidth.textarea,html.theme--documenter-dark .is-fullwidth.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-fullwidth{display:block;width:100%}html.theme--documenter-dark .is-inline.textarea,html.theme--documenter-dark .is-inline.input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-inline{display:inline;width:auto}html.theme--documenter-dark .input.is-rounded,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input{border-radius:9999px;padding-left:calc(calc(0.75em - 1px) + 0.375em);padding-right:calc(calc(0.75em - 1px) + 0.375em)}html.theme--documenter-dark .input.is-static,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-static{background-color:transparent;border-color:transparent;box-shadow:none;padding-left:0;padding-right:0}html.theme--documenter-dark .textarea{display:block;max-width:100%;min-width:100%;padding:calc(0.75em - 1px);resize:vertical}html.theme--documenter-dark .textarea:not([rows]){max-height:40em;min-height:8em}html.theme--documenter-dark .textarea[rows]{height:initial}html.theme--documenter-dark .textarea.has-fixed-size{resize:none}html.theme--documenter-dark .radio,html.theme--documenter-dark .checkbox{cursor:pointer;display:inline-block;line-height:1.25;position:relative}html.theme--documenter-dark .radio input,html.theme--documenter-dark .checkbox input{cursor:pointer}html.theme--documenter-dark .radio:hover,html.theme--documenter-dark 
.checkbox:hover{color:#8c9b9d}html.theme--documenter-dark .radio[disabled],html.theme--documenter-dark .checkbox[disabled],fieldset[disabled] html.theme--documenter-dark .radio,fieldset[disabled] html.theme--documenter-dark .checkbox,html.theme--documenter-dark .radio input[disabled],html.theme--documenter-dark .checkbox input[disabled]{color:#fff;cursor:not-allowed}html.theme--documenter-dark .radio+.radio{margin-left:.5em}html.theme--documenter-dark .select{display:inline-block;max-width:100%;position:relative;vertical-align:top}html.theme--documenter-dark .select:not(.is-multiple){height:2.5em}html.theme--documenter-dark .select:not(.is-multiple):not(.is-loading)::after{border-color:#1abc9c;right:1.125em;z-index:4}html.theme--documenter-dark .select.is-rounded select,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.select select{border-radius:9999px;padding-left:1em}html.theme--documenter-dark .select select{cursor:pointer;display:block;font-size:1em;max-width:100%;outline:none}html.theme--documenter-dark .select select::-ms-expand{display:none}html.theme--documenter-dark .select select[disabled]:hover,fieldset[disabled] html.theme--documenter-dark .select select:hover{border-color:#282f2f}html.theme--documenter-dark .select select:not([multiple]){padding-right:2.5em}html.theme--documenter-dark .select select[multiple]{height:auto;padding:0}html.theme--documenter-dark .select select[multiple] option{padding:0.5em 1em}html.theme--documenter-dark .select:not(.is-multiple):not(.is-loading):hover::after{border-color:#8c9b9d}html.theme--documenter-dark .select.is-white:not(:hover)::after{border-color:#fff}html.theme--documenter-dark .select.is-white select{border-color:#fff}html.theme--documenter-dark .select.is-white select:hover,html.theme--documenter-dark .select.is-white select.is-hovered{border-color:#f2f2f2}html.theme--documenter-dark .select.is-white select:focus,html.theme--documenter-dark .select.is-white select.is-focused,html.theme--documenter-dark .select.is-white select:active,html.theme--documenter-dark .select.is-white select.is-active{box-shadow:0 0 0 0.125em rgba(255,255,255,0.25)}html.theme--documenter-dark .select.is-black:not(:hover)::after{border-color:#0a0a0a}html.theme--documenter-dark .select.is-black select{border-color:#0a0a0a}html.theme--documenter-dark .select.is-black select:hover,html.theme--documenter-dark .select.is-black select.is-hovered{border-color:#000}html.theme--documenter-dark .select.is-black select:focus,html.theme--documenter-dark .select.is-black select.is-focused,html.theme--documenter-dark .select.is-black select:active,html.theme--documenter-dark .select.is-black select.is-active{box-shadow:0 0 0 0.125em rgba(10,10,10,0.25)}html.theme--documenter-dark .select.is-light:not(:hover)::after{border-color:#ecf0f1}html.theme--documenter-dark .select.is-light select{border-color:#ecf0f1}html.theme--documenter-dark .select.is-light select:hover,html.theme--documenter-dark .select.is-light select.is-hovered{border-color:#dde4e6}html.theme--documenter-dark .select.is-light select:focus,html.theme--documenter-dark .select.is-light select.is-focused,html.theme--documenter-dark .select.is-light select:active,html.theme--documenter-dark .select.is-light select.is-active{box-shadow:0 0 0 0.125em rgba(236,240,241,0.25)}html.theme--documenter-dark .select.is-dark:not(:hover)::after,html.theme--documenter-dark .content kbd.select:not(:hover)::after{border-color:#282f2f}html.theme--documenter-dark .select.is-dark 
select,html.theme--documenter-dark .content kbd.select select{border-color:#282f2f}html.theme--documenter-dark .select.is-dark select:hover,html.theme--documenter-dark .content kbd.select select:hover,html.theme--documenter-dark .select.is-dark select.is-hovered,html.theme--documenter-dark .content kbd.select select.is-hovered{border-color:#1d2122}html.theme--documenter-dark .select.is-dark select:focus,html.theme--documenter-dark .content kbd.select select:focus,html.theme--documenter-dark .select.is-dark select.is-focused,html.theme--documenter-dark .content kbd.select select.is-focused,html.theme--documenter-dark .select.is-dark select:active,html.theme--documenter-dark .content kbd.select select:active,html.theme--documenter-dark .select.is-dark select.is-active,html.theme--documenter-dark .content kbd.select select.is-active{box-shadow:0 0 0 0.125em rgba(40,47,47,0.25)}html.theme--documenter-dark .select.is-primary:not(:hover)::after,html.theme--documenter-dark .docstring>section>a.select.docs-sourcelink:not(:hover)::after{border-color:#375a7f}html.theme--documenter-dark .select.is-primary select,html.theme--documenter-dark .docstring>section>a.select.docs-sourcelink select{border-color:#375a7f}html.theme--documenter-dark .select.is-primary select:hover,html.theme--documenter-dark .docstring>section>a.select.docs-sourcelink select:hover,html.theme--documenter-dark .select.is-primary select.is-hovered,html.theme--documenter-dark .docstring>section>a.select.docs-sourcelink select.is-hovered{border-color:#2f4d6d}html.theme--documenter-dark .select.is-primary select:focus,html.theme--documenter-dark .docstring>section>a.select.docs-sourcelink select:focus,html.theme--documenter-dark .select.is-primary select.is-focused,html.theme--documenter-dark .docstring>section>a.select.docs-sourcelink select.is-focused,html.theme--documenter-dark .select.is-primary select:active,html.theme--documenter-dark .docstring>section>a.select.docs-sourcelink select:active,html.theme--documenter-dark .select.is-primary select.is-active,html.theme--documenter-dark .docstring>section>a.select.docs-sourcelink select.is-active{box-shadow:0 0 0 0.125em rgba(55,90,127,0.25)}html.theme--documenter-dark .select.is-link:not(:hover)::after{border-color:#1abc9c}html.theme--documenter-dark .select.is-link select{border-color:#1abc9c}html.theme--documenter-dark .select.is-link select:hover,html.theme--documenter-dark .select.is-link select.is-hovered{border-color:#17a689}html.theme--documenter-dark .select.is-link select:focus,html.theme--documenter-dark .select.is-link select.is-focused,html.theme--documenter-dark .select.is-link select:active,html.theme--documenter-dark .select.is-link select.is-active{box-shadow:0 0 0 0.125em rgba(26,188,156,0.25)}html.theme--documenter-dark .select.is-info:not(:hover)::after{border-color:#024c7d}html.theme--documenter-dark .select.is-info select{border-color:#024c7d}html.theme--documenter-dark .select.is-info select:hover,html.theme--documenter-dark .select.is-info select.is-hovered{border-color:#023d64}html.theme--documenter-dark .select.is-info select:focus,html.theme--documenter-dark .select.is-info select.is-focused,html.theme--documenter-dark .select.is-info select:active,html.theme--documenter-dark .select.is-info select.is-active{box-shadow:0 0 0 0.125em rgba(2,76,125,0.25)}html.theme--documenter-dark .select.is-success:not(:hover)::after{border-color:#008438}html.theme--documenter-dark .select.is-success select{border-color:#008438}html.theme--documenter-dark .select.is-success 
select:hover,html.theme--documenter-dark .select.is-success select.is-hovered{border-color:#006b2d}html.theme--documenter-dark .select.is-success select:focus,html.theme--documenter-dark .select.is-success select.is-focused,html.theme--documenter-dark .select.is-success select:active,html.theme--documenter-dark .select.is-success select.is-active{box-shadow:0 0 0 0.125em rgba(0,132,56,0.25)}html.theme--documenter-dark .select.is-warning:not(:hover)::after{border-color:#ad8100}html.theme--documenter-dark .select.is-warning select{border-color:#ad8100}html.theme--documenter-dark .select.is-warning select:hover,html.theme--documenter-dark .select.is-warning select.is-hovered{border-color:#946e00}html.theme--documenter-dark .select.is-warning select:focus,html.theme--documenter-dark .select.is-warning select.is-focused,html.theme--documenter-dark .select.is-warning select:active,html.theme--documenter-dark .select.is-warning select.is-active{box-shadow:0 0 0 0.125em rgba(173,129,0,0.25)}html.theme--documenter-dark .select.is-danger:not(:hover)::after{border-color:#9e1b0d}html.theme--documenter-dark .select.is-danger select{border-color:#9e1b0d}html.theme--documenter-dark .select.is-danger select:hover,html.theme--documenter-dark .select.is-danger select.is-hovered{border-color:#86170b}html.theme--documenter-dark .select.is-danger select:focus,html.theme--documenter-dark .select.is-danger select.is-focused,html.theme--documenter-dark .select.is-danger select:active,html.theme--documenter-dark .select.is-danger select.is-active{box-shadow:0 0 0 0.125em rgba(158,27,13,0.25)}html.theme--documenter-dark .select.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.select{border-radius:3px;font-size:.75rem}html.theme--documenter-dark .select.is-medium{font-size:1.25rem}html.theme--documenter-dark .select.is-large{font-size:1.5rem}html.theme--documenter-dark .select.is-disabled::after{border-color:#fff !important;opacity:0.5}html.theme--documenter-dark .select.is-fullwidth{width:100%}html.theme--documenter-dark .select.is-fullwidth select{width:100%}html.theme--documenter-dark .select.is-loading::after{margin-top:0;position:absolute;right:.625em;top:0.625em;transform:none}html.theme--documenter-dark .select.is-loading.is-small:after,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-loading:after{font-size:.75rem}html.theme--documenter-dark .select.is-loading.is-medium:after{font-size:1.25rem}html.theme--documenter-dark .select.is-loading.is-large:after{font-size:1.5rem}html.theme--documenter-dark .file{align-items:stretch;display:flex;justify-content:flex-start;position:relative}html.theme--documenter-dark .file.is-white .file-cta{background-color:#fff;border-color:transparent;color:#0a0a0a}html.theme--documenter-dark .file.is-white:hover .file-cta,html.theme--documenter-dark .file.is-white.is-hovered .file-cta{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}html.theme--documenter-dark .file.is-white:focus .file-cta,html.theme--documenter-dark .file.is-white.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(255,255,255,0.25);color:#0a0a0a}html.theme--documenter-dark .file.is-white:active .file-cta,html.theme--documenter-dark .file.is-white.is-active .file-cta{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}html.theme--documenter-dark .file.is-black .file-cta{background-color:#0a0a0a;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-black:hover 
.file-cta,html.theme--documenter-dark .file.is-black.is-hovered .file-cta{background-color:#040404;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-black:focus .file-cta,html.theme--documenter-dark .file.is-black.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(10,10,10,0.25);color:#fff}html.theme--documenter-dark .file.is-black:active .file-cta,html.theme--documenter-dark .file.is-black.is-active .file-cta{background-color:#000;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-light .file-cta{background-color:#ecf0f1;border-color:transparent;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .file.is-light:hover .file-cta,html.theme--documenter-dark .file.is-light.is-hovered .file-cta{background-color:#e5eaec;border-color:transparent;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .file.is-light:focus .file-cta,html.theme--documenter-dark .file.is-light.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(236,240,241,0.25);color:rgba(0,0,0,0.7)}html.theme--documenter-dark .file.is-light:active .file-cta,html.theme--documenter-dark .file.is-light.is-active .file-cta{background-color:#dde4e6;border-color:transparent;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .file.is-dark .file-cta,html.theme--documenter-dark .content kbd.file .file-cta{background-color:#282f2f;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-dark:hover .file-cta,html.theme--documenter-dark .content kbd.file:hover .file-cta,html.theme--documenter-dark .file.is-dark.is-hovered .file-cta,html.theme--documenter-dark .content kbd.file.is-hovered .file-cta{background-color:#232829;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-dark:focus .file-cta,html.theme--documenter-dark .content kbd.file:focus .file-cta,html.theme--documenter-dark .file.is-dark.is-focused .file-cta,html.theme--documenter-dark .content kbd.file.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(40,47,47,0.25);color:#fff}html.theme--documenter-dark .file.is-dark:active .file-cta,html.theme--documenter-dark .content kbd.file:active .file-cta,html.theme--documenter-dark .file.is-dark.is-active .file-cta,html.theme--documenter-dark .content kbd.file.is-active .file-cta{background-color:#1d2122;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-primary .file-cta,html.theme--documenter-dark .docstring>section>a.file.docs-sourcelink .file-cta{background-color:#375a7f;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-primary:hover .file-cta,html.theme--documenter-dark .docstring>section>a.file.docs-sourcelink:hover .file-cta,html.theme--documenter-dark .file.is-primary.is-hovered .file-cta,html.theme--documenter-dark .docstring>section>a.file.is-hovered.docs-sourcelink .file-cta{background-color:#335476;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-primary:focus .file-cta,html.theme--documenter-dark .docstring>section>a.file.docs-sourcelink:focus .file-cta,html.theme--documenter-dark .file.is-primary.is-focused .file-cta,html.theme--documenter-dark .docstring>section>a.file.is-focused.docs-sourcelink .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(55,90,127,0.25);color:#fff}html.theme--documenter-dark .file.is-primary:active .file-cta,html.theme--documenter-dark .docstring>section>a.file.docs-sourcelink:active .file-cta,html.theme--documenter-dark .file.is-primary.is-active .file-cta,html.theme--documenter-dark 
.docstring>section>a.file.is-active.docs-sourcelink .file-cta{background-color:#2f4d6d;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-link .file-cta{background-color:#1abc9c;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-link:hover .file-cta,html.theme--documenter-dark .file.is-link.is-hovered .file-cta{background-color:#18b193;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-link:focus .file-cta,html.theme--documenter-dark .file.is-link.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(26,188,156,0.25);color:#fff}html.theme--documenter-dark .file.is-link:active .file-cta,html.theme--documenter-dark .file.is-link.is-active .file-cta{background-color:#17a689;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-info .file-cta{background-color:#024c7d;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-info:hover .file-cta,html.theme--documenter-dark .file.is-info.is-hovered .file-cta{background-color:#024470;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-info:focus .file-cta,html.theme--documenter-dark .file.is-info.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(2,76,125,0.25);color:#fff}html.theme--documenter-dark .file.is-info:active .file-cta,html.theme--documenter-dark .file.is-info.is-active .file-cta{background-color:#023d64;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-success .file-cta{background-color:#008438;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-success:hover .file-cta,html.theme--documenter-dark .file.is-success.is-hovered .file-cta{background-color:#073;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-success:focus .file-cta,html.theme--documenter-dark .file.is-success.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(0,132,56,0.25);color:#fff}html.theme--documenter-dark .file.is-success:active .file-cta,html.theme--documenter-dark .file.is-success.is-active .file-cta{background-color:#006b2d;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-warning .file-cta{background-color:#ad8100;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-warning:hover .file-cta,html.theme--documenter-dark .file.is-warning.is-hovered .file-cta{background-color:#a07700;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-warning:focus .file-cta,html.theme--documenter-dark .file.is-warning.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(173,129,0,0.25);color:#fff}html.theme--documenter-dark .file.is-warning:active .file-cta,html.theme--documenter-dark .file.is-warning.is-active .file-cta{background-color:#946e00;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-danger .file-cta{background-color:#9e1b0d;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-danger:hover .file-cta,html.theme--documenter-dark .file.is-danger.is-hovered .file-cta{background-color:#92190c;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-danger:focus .file-cta,html.theme--documenter-dark .file.is-danger.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(158,27,13,0.25);color:#fff}html.theme--documenter-dark .file.is-danger:active .file-cta,html.theme--documenter-dark .file.is-danger.is-active 
.file-cta{background-color:#86170b;border-color:transparent;color:#fff}html.theme--documenter-dark .file.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.file{font-size:.75rem}html.theme--documenter-dark .file.is-normal{font-size:1rem}html.theme--documenter-dark .file.is-medium{font-size:1.25rem}html.theme--documenter-dark .file.is-medium .file-icon .fa{font-size:21px}html.theme--documenter-dark .file.is-large{font-size:1.5rem}html.theme--documenter-dark .file.is-large .file-icon .fa{font-size:28px}html.theme--documenter-dark .file.has-name .file-cta{border-bottom-right-radius:0;border-top-right-radius:0}html.theme--documenter-dark .file.has-name .file-name{border-bottom-left-radius:0;border-top-left-radius:0}html.theme--documenter-dark .file.has-name.is-empty .file-cta{border-radius:.4em}html.theme--documenter-dark .file.has-name.is-empty .file-name{display:none}html.theme--documenter-dark .file.is-boxed .file-label{flex-direction:column}html.theme--documenter-dark .file.is-boxed .file-cta{flex-direction:column;height:auto;padding:1em 3em}html.theme--documenter-dark .file.is-boxed .file-name{border-width:0 1px 1px}html.theme--documenter-dark .file.is-boxed .file-icon{height:1.5em;width:1.5em}html.theme--documenter-dark .file.is-boxed .file-icon .fa{font-size:21px}html.theme--documenter-dark .file.is-boxed.is-small .file-icon .fa,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-boxed .file-icon .fa{font-size:14px}html.theme--documenter-dark .file.is-boxed.is-medium .file-icon .fa{font-size:28px}html.theme--documenter-dark .file.is-boxed.is-large .file-icon .fa{font-size:35px}html.theme--documenter-dark .file.is-boxed.has-name .file-cta{border-radius:.4em .4em 0 0}html.theme--documenter-dark .file.is-boxed.has-name .file-name{border-radius:0 0 .4em .4em;border-width:0 1px 1px}html.theme--documenter-dark .file.is-centered{justify-content:center}html.theme--documenter-dark .file.is-fullwidth .file-label{width:100%}html.theme--documenter-dark .file.is-fullwidth .file-name{flex-grow:1;max-width:none}html.theme--documenter-dark .file.is-right{justify-content:flex-end}html.theme--documenter-dark .file.is-right .file-cta{border-radius:0 .4em .4em 0}html.theme--documenter-dark .file.is-right .file-name{border-radius:.4em 0 0 .4em;border-width:1px 0 1px 1px;order:-1}html.theme--documenter-dark .file-label{align-items:stretch;display:flex;cursor:pointer;justify-content:flex-start;overflow:hidden;position:relative}html.theme--documenter-dark .file-label:hover .file-cta{background-color:#232829;color:#f2f2f2}html.theme--documenter-dark .file-label:hover .file-name{border-color:#596668}html.theme--documenter-dark .file-label:active .file-cta{background-color:#1d2122;color:#f2f2f2}html.theme--documenter-dark .file-label:active .file-name{border-color:#535f61}html.theme--documenter-dark .file-input{height:100%;left:0;opacity:0;outline:none;position:absolute;top:0;width:100%}html.theme--documenter-dark .file-cta,html.theme--documenter-dark .file-name{border-color:#5e6d6f;border-radius:.4em;font-size:1em;padding-left:1em;padding-right:1em;white-space:nowrap}html.theme--documenter-dark .file-cta{background-color:#282f2f;color:#fff}html.theme--documenter-dark .file-name{border-color:#5e6d6f;border-style:solid;border-width:1px 1px 1px 0;display:block;max-width:16em;overflow:hidden;text-align:inherit;text-overflow:ellipsis}html.theme--documenter-dark 
.file-icon{align-items:center;display:flex;height:1em;justify-content:center;margin-right:.5em;width:1em}html.theme--documenter-dark .file-icon .fa{font-size:14px}html.theme--documenter-dark .label{color:#f2f2f2;display:block;font-size:1rem;font-weight:700}html.theme--documenter-dark .label:not(:last-child){margin-bottom:0.5em}html.theme--documenter-dark .label.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.label{font-size:.75rem}html.theme--documenter-dark .label.is-medium{font-size:1.25rem}html.theme--documenter-dark .label.is-large{font-size:1.5rem}html.theme--documenter-dark .help{display:block;font-size:.75rem;margin-top:0.25rem}html.theme--documenter-dark .help.is-white{color:#fff}html.theme--documenter-dark .help.is-black{color:#0a0a0a}html.theme--documenter-dark .help.is-light{color:#ecf0f1}html.theme--documenter-dark .help.is-dark,html.theme--documenter-dark .content kbd.help{color:#282f2f}html.theme--documenter-dark .help.is-primary,html.theme--documenter-dark .docstring>section>a.help.docs-sourcelink{color:#375a7f}html.theme--documenter-dark .help.is-link{color:#1abc9c}html.theme--documenter-dark .help.is-info{color:#024c7d}html.theme--documenter-dark .help.is-success{color:#008438}html.theme--documenter-dark .help.is-warning{color:#ad8100}html.theme--documenter-dark .help.is-danger{color:#9e1b0d}html.theme--documenter-dark .field:not(:last-child){margin-bottom:0.75rem}html.theme--documenter-dark .field.has-addons{display:flex;justify-content:flex-start}html.theme--documenter-dark .field.has-addons .control:not(:last-child){margin-right:-1px}html.theme--documenter-dark .field.has-addons .control:not(:first-child):not(:last-child) .button,html.theme--documenter-dark .field.has-addons .control:not(:first-child):not(:last-child) .input,html.theme--documenter-dark .field.has-addons .control:not(:first-child):not(:last-child) #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control:not(:first-child):not(:last-child) form.docs-search>input,html.theme--documenter-dark .field.has-addons .control:not(:first-child):not(:last-child) .select select{border-radius:0}html.theme--documenter-dark .field.has-addons .control:first-child:not(:only-child) .button,html.theme--documenter-dark .field.has-addons .control:first-child:not(:only-child) .input,html.theme--documenter-dark .field.has-addons .control:first-child:not(:only-child) #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control:first-child:not(:only-child) form.docs-search>input,html.theme--documenter-dark .field.has-addons .control:first-child:not(:only-child) .select select{border-bottom-right-radius:0;border-top-right-radius:0}html.theme--documenter-dark .field.has-addons .control:last-child:not(:only-child) .button,html.theme--documenter-dark .field.has-addons .control:last-child:not(:only-child) .input,html.theme--documenter-dark .field.has-addons .control:last-child:not(:only-child) #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control:last-child:not(:only-child) form.docs-search>input,html.theme--documenter-dark .field.has-addons .control:last-child:not(:only-child) .select select{border-bottom-left-radius:0;border-top-left-radius:0}html.theme--documenter-dark .field.has-addons .control .button:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control 
.button.is-hovered:not([disabled]),html.theme--documenter-dark .field.has-addons .control .input:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control #documenter .docs-sidebar form.docs-search>input:not([disabled]):hover,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control .input.is-hovered:not([disabled]),html.theme--documenter-dark .field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-hovered:not([disabled]),html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-hovered:not([disabled]),html.theme--documenter-dark .field.has-addons .control .select select:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control .select select.is-hovered:not([disabled]){z-index:2}html.theme--documenter-dark .field.has-addons .control .button:not([disabled]):focus,html.theme--documenter-dark .field.has-addons .control .button.is-focused:not([disabled]),html.theme--documenter-dark .field.has-addons .control .button:not([disabled]):active,html.theme--documenter-dark .field.has-addons .control .button.is-active:not([disabled]),html.theme--documenter-dark .field.has-addons .control .input:not([disabled]):focus,html.theme--documenter-dark .field.has-addons .control #documenter .docs-sidebar form.docs-search>input:not([disabled]):focus,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):focus,html.theme--documenter-dark .field.has-addons .control .input.is-focused:not([disabled]),html.theme--documenter-dark .field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-focused:not([disabled]),html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-focused:not([disabled]),html.theme--documenter-dark .field.has-addons .control .input:not([disabled]):active,html.theme--documenter-dark .field.has-addons .control #documenter .docs-sidebar form.docs-search>input:not([disabled]):active,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):active,html.theme--documenter-dark .field.has-addons .control .input.is-active:not([disabled]),html.theme--documenter-dark .field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-active:not([disabled]),html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-active:not([disabled]),html.theme--documenter-dark .field.has-addons .control .select select:not([disabled]):focus,html.theme--documenter-dark .field.has-addons .control .select select.is-focused:not([disabled]),html.theme--documenter-dark .field.has-addons .control .select select:not([disabled]):active,html.theme--documenter-dark .field.has-addons .control .select select.is-active:not([disabled]){z-index:3}html.theme--documenter-dark .field.has-addons .control .button:not([disabled]):focus:hover,html.theme--documenter-dark .field.has-addons .control .button.is-focused:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control .button:not([disabled]):active:hover,html.theme--documenter-dark .field.has-addons .control .button.is-active:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control .input:not([disabled]):focus:hover,html.theme--documenter-dark .field.has-addons .control 
#documenter .docs-sidebar form.docs-search>input:not([disabled]):focus:hover,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):focus:hover,html.theme--documenter-dark .field.has-addons .control .input.is-focused:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-focused:not([disabled]):hover,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-focused:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control .input:not([disabled]):active:hover,html.theme--documenter-dark .field.has-addons .control #documenter .docs-sidebar form.docs-search>input:not([disabled]):active:hover,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):active:hover,html.theme--documenter-dark .field.has-addons .control .input.is-active:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-active:not([disabled]):hover,html.theme--documenter-dark #documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-active:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control .select select:not([disabled]):focus:hover,html.theme--documenter-dark .field.has-addons .control .select select.is-focused:not([disabled]):hover,html.theme--documenter-dark .field.has-addons .control .select select:not([disabled]):active:hover,html.theme--documenter-dark .field.has-addons .control .select select.is-active:not([disabled]):hover{z-index:4}html.theme--documenter-dark .field.has-addons .control.is-expanded{flex-grow:1;flex-shrink:1}html.theme--documenter-dark .field.has-addons.has-addons-centered{justify-content:center}html.theme--documenter-dark .field.has-addons.has-addons-right{justify-content:flex-end}html.theme--documenter-dark .field.has-addons.has-addons-fullwidth .control{flex-grow:1;flex-shrink:0}html.theme--documenter-dark .field.is-grouped{display:flex;justify-content:flex-start}html.theme--documenter-dark .field.is-grouped>.control{flex-shrink:0}html.theme--documenter-dark .field.is-grouped>.control:not(:last-child){margin-bottom:0;margin-right:.75rem}html.theme--documenter-dark .field.is-grouped>.control.is-expanded{flex-grow:1;flex-shrink:1}html.theme--documenter-dark .field.is-grouped.is-grouped-centered{justify-content:center}html.theme--documenter-dark .field.is-grouped.is-grouped-right{justify-content:flex-end}html.theme--documenter-dark .field.is-grouped.is-grouped-multiline{flex-wrap:wrap}html.theme--documenter-dark .field.is-grouped.is-grouped-multiline>.control:last-child,html.theme--documenter-dark .field.is-grouped.is-grouped-multiline>.control:not(:last-child){margin-bottom:0.75rem}html.theme--documenter-dark .field.is-grouped.is-grouped-multiline:last-child{margin-bottom:-0.75rem}html.theme--documenter-dark .field.is-grouped.is-grouped-multiline:not(:last-child){margin-bottom:0}@media screen and (min-width: 769px),print{html.theme--documenter-dark .field.is-horizontal{display:flex}}html.theme--documenter-dark .field-label .label{font-size:inherit}@media screen and (max-width: 768px){html.theme--documenter-dark .field-label{margin-bottom:0.5rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark 
.field-label{flex-basis:0;flex-grow:1;flex-shrink:0;margin-right:1.5rem;text-align:right}html.theme--documenter-dark .field-label.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.field-label{font-size:.75rem;padding-top:0.375em}html.theme--documenter-dark .field-label.is-normal{padding-top:0.375em}html.theme--documenter-dark .field-label.is-medium{font-size:1.25rem;padding-top:0.375em}html.theme--documenter-dark .field-label.is-large{font-size:1.5rem;padding-top:0.375em}}html.theme--documenter-dark .field-body .field .field{margin-bottom:0}@media screen and (min-width: 769px),print{html.theme--documenter-dark .field-body{display:flex;flex-basis:0;flex-grow:5;flex-shrink:1}html.theme--documenter-dark .field-body .field{margin-bottom:0}html.theme--documenter-dark .field-body>.field{flex-shrink:1}html.theme--documenter-dark .field-body>.field:not(.is-narrow){flex-grow:1}html.theme--documenter-dark .field-body>.field:not(:last-child){margin-right:.75rem}}html.theme--documenter-dark .control{box-sizing:border-box;clear:both;font-size:1rem;position:relative;text-align:inherit}html.theme--documenter-dark .control.has-icons-left .input:focus~.icon,html.theme--documenter-dark .control.has-icons-left #documenter .docs-sidebar form.docs-search>input:focus~.icon,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-left form.docs-search>input:focus~.icon,html.theme--documenter-dark .control.has-icons-left .select:focus~.icon,html.theme--documenter-dark .control.has-icons-right .input:focus~.icon,html.theme--documenter-dark .control.has-icons-right #documenter .docs-sidebar form.docs-search>input:focus~.icon,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-right form.docs-search>input:focus~.icon,html.theme--documenter-dark .control.has-icons-right .select:focus~.icon{color:#282f2f}html.theme--documenter-dark .control.has-icons-left .input.is-small~.icon,html.theme--documenter-dark .control.has-icons-left #documenter .docs-sidebar form.docs-search>input~.icon,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-left form.docs-search>input~.icon,html.theme--documenter-dark .control.has-icons-left .select.is-small~.icon,html.theme--documenter-dark .control.has-icons-right .input.is-small~.icon,html.theme--documenter-dark .control.has-icons-right #documenter .docs-sidebar form.docs-search>input~.icon,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-right form.docs-search>input~.icon,html.theme--documenter-dark .control.has-icons-right .select.is-small~.icon{font-size:.75rem}html.theme--documenter-dark .control.has-icons-left .input.is-medium~.icon,html.theme--documenter-dark .control.has-icons-left #documenter .docs-sidebar form.docs-search>input.is-medium~.icon,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-left form.docs-search>input.is-medium~.icon,html.theme--documenter-dark .control.has-icons-left .select.is-medium~.icon,html.theme--documenter-dark .control.has-icons-right .input.is-medium~.icon,html.theme--documenter-dark .control.has-icons-right #documenter .docs-sidebar form.docs-search>input.is-medium~.icon,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-right form.docs-search>input.is-medium~.icon,html.theme--documenter-dark .control.has-icons-right .select.is-medium~.icon{font-size:1.25rem}html.theme--documenter-dark .control.has-icons-left .input.is-large~.icon,html.theme--documenter-dark .control.has-icons-left 
#documenter .docs-sidebar form.docs-search>input.is-large~.icon,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-left form.docs-search>input.is-large~.icon,html.theme--documenter-dark .control.has-icons-left .select.is-large~.icon,html.theme--documenter-dark .control.has-icons-right .input.is-large~.icon,html.theme--documenter-dark .control.has-icons-right #documenter .docs-sidebar form.docs-search>input.is-large~.icon,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-right form.docs-search>input.is-large~.icon,html.theme--documenter-dark .control.has-icons-right .select.is-large~.icon{font-size:1.5rem}html.theme--documenter-dark .control.has-icons-left .icon,html.theme--documenter-dark .control.has-icons-right .icon{color:#5e6d6f;height:2.5em;pointer-events:none;position:absolute;top:0;width:2.5em;z-index:4}html.theme--documenter-dark .control.has-icons-left .input,html.theme--documenter-dark .control.has-icons-left #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-left form.docs-search>input,html.theme--documenter-dark .control.has-icons-left .select select{padding-left:2.5em}html.theme--documenter-dark .control.has-icons-left .icon.is-left{left:0}html.theme--documenter-dark .control.has-icons-right .input,html.theme--documenter-dark .control.has-icons-right #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark #documenter .docs-sidebar .control.has-icons-right form.docs-search>input,html.theme--documenter-dark .control.has-icons-right .select select{padding-right:2.5em}html.theme--documenter-dark .control.has-icons-right .icon.is-right{right:0}html.theme--documenter-dark .control.is-loading::after{position:absolute !important;right:.625em;top:0.625em;z-index:4}html.theme--documenter-dark .control.is-loading.is-small:after,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.is-loading:after{font-size:.75rem}html.theme--documenter-dark .control.is-loading.is-medium:after{font-size:1.25rem}html.theme--documenter-dark .control.is-loading.is-large:after{font-size:1.5rem}html.theme--documenter-dark .breadcrumb{font-size:1rem;white-space:nowrap}html.theme--documenter-dark .breadcrumb a{align-items:center;color:#1abc9c;display:flex;justify-content:center;padding:0 .75em}html.theme--documenter-dark .breadcrumb a:hover{color:#1dd2af}html.theme--documenter-dark .breadcrumb li{align-items:center;display:flex}html.theme--documenter-dark .breadcrumb li:first-child a{padding-left:0}html.theme--documenter-dark .breadcrumb li.is-active a{color:#f2f2f2;cursor:default;pointer-events:none}html.theme--documenter-dark .breadcrumb li+li::before{color:#8c9b9d;content:"\0002f"}html.theme--documenter-dark .breadcrumb ul,html.theme--documenter-dark .breadcrumb ol{align-items:flex-start;display:flex;flex-wrap:wrap;justify-content:flex-start}html.theme--documenter-dark .breadcrumb .icon:first-child{margin-right:.5em}html.theme--documenter-dark .breadcrumb .icon:last-child{margin-left:.5em}html.theme--documenter-dark .breadcrumb.is-centered ol,html.theme--documenter-dark .breadcrumb.is-centered ul{justify-content:center}html.theme--documenter-dark .breadcrumb.is-right ol,html.theme--documenter-dark .breadcrumb.is-right ul{justify-content:flex-end}html.theme--documenter-dark .breadcrumb.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.breadcrumb{font-size:.75rem}html.theme--documenter-dark 
.breadcrumb.is-medium{font-size:1.25rem}html.theme--documenter-dark .breadcrumb.is-large{font-size:1.5rem}html.theme--documenter-dark .breadcrumb.has-arrow-separator li+li::before{content:"\02192"}html.theme--documenter-dark .breadcrumb.has-bullet-separator li+li::before{content:"\02022"}html.theme--documenter-dark .breadcrumb.has-dot-separator li+li::before{content:"\000b7"}html.theme--documenter-dark .breadcrumb.has-succeeds-separator li+li::before{content:"\0227B"}html.theme--documenter-dark .card{background-color:#fff;border-radius:.25rem;box-shadow:#171717;color:#fff;max-width:100%;position:relative}html.theme--documenter-dark .card-footer:first-child,html.theme--documenter-dark .card-content:first-child,html.theme--documenter-dark .card-header:first-child{border-top-left-radius:.25rem;border-top-right-radius:.25rem}html.theme--documenter-dark .card-footer:last-child,html.theme--documenter-dark .card-content:last-child,html.theme--documenter-dark .card-header:last-child{border-bottom-left-radius:.25rem;border-bottom-right-radius:.25rem}html.theme--documenter-dark .card-header{background-color:rgba(0,0,0,0);align-items:stretch;box-shadow:0 0.125em 0.25em rgba(10,10,10,0.1);display:flex}html.theme--documenter-dark .card-header-title{align-items:center;color:#f2f2f2;display:flex;flex-grow:1;font-weight:700;padding:0.75rem 1rem}html.theme--documenter-dark .card-header-title.is-centered{justify-content:center}html.theme--documenter-dark .card-header-icon{-moz-appearance:none;-webkit-appearance:none;appearance:none;background:none;border:none;color:currentColor;font-family:inherit;font-size:1em;margin:0;padding:0;align-items:center;cursor:pointer;display:flex;justify-content:center;padding:0.75rem 1rem}html.theme--documenter-dark .card-image{display:block;position:relative}html.theme--documenter-dark .card-image:first-child img{border-top-left-radius:.25rem;border-top-right-radius:.25rem}html.theme--documenter-dark .card-image:last-child img{border-bottom-left-radius:.25rem;border-bottom-right-radius:.25rem}html.theme--documenter-dark .card-content{background-color:rgba(0,0,0,0);padding:1.5rem}html.theme--documenter-dark .card-footer{background-color:rgba(0,0,0,0);border-top:1px solid #ededed;align-items:stretch;display:flex}html.theme--documenter-dark .card-footer-item{align-items:center;display:flex;flex-basis:0;flex-grow:1;flex-shrink:0;justify-content:center;padding:.75rem}html.theme--documenter-dark .card-footer-item:not(:last-child){border-right:1px solid #ededed}html.theme--documenter-dark .card .media:not(:last-child){margin-bottom:1.5rem}html.theme--documenter-dark .dropdown{display:inline-flex;position:relative;vertical-align:top}html.theme--documenter-dark .dropdown.is-active .dropdown-menu,html.theme--documenter-dark .dropdown.is-hoverable:hover .dropdown-menu{display:block}html.theme--documenter-dark .dropdown.is-right .dropdown-menu{left:auto;right:0}html.theme--documenter-dark .dropdown.is-up .dropdown-menu{bottom:100%;padding-bottom:4px;padding-top:initial;top:auto}html.theme--documenter-dark .dropdown-menu{display:none;left:0;min-width:12rem;padding-top:4px;position:absolute;top:100%;z-index:20}html.theme--documenter-dark .dropdown-content{background-color:#282f2f;border-radius:.4em;box-shadow:#171717;padding-bottom:.5rem;padding-top:.5rem}html.theme--documenter-dark .dropdown-item{color:#fff;display:block;font-size:0.875rem;line-height:1.5;padding:0.375rem 1rem;position:relative}html.theme--documenter-dark a.dropdown-item,html.theme--documenter-dark 
button.dropdown-item{padding-right:3rem;text-align:inherit;white-space:nowrap;width:100%}html.theme--documenter-dark a.dropdown-item:hover,html.theme--documenter-dark button.dropdown-item:hover{background-color:#282f2f;color:#0a0a0a}html.theme--documenter-dark a.dropdown-item.is-active,html.theme--documenter-dark button.dropdown-item.is-active{background-color:#1abc9c;color:#fff}html.theme--documenter-dark .dropdown-divider{background-color:#ededed;border:none;display:block;height:1px;margin:0.5rem 0}html.theme--documenter-dark .level{align-items:center;justify-content:space-between}html.theme--documenter-dark .level code{border-radius:.4em}html.theme--documenter-dark .level img{display:inline-block;vertical-align:top}html.theme--documenter-dark .level.is-mobile{display:flex}html.theme--documenter-dark .level.is-mobile .level-left,html.theme--documenter-dark .level.is-mobile .level-right{display:flex}html.theme--documenter-dark .level.is-mobile .level-left+.level-right{margin-top:0}html.theme--documenter-dark .level.is-mobile .level-item:not(:last-child){margin-bottom:0;margin-right:.75rem}html.theme--documenter-dark .level.is-mobile .level-item:not(.is-narrow){flex-grow:1}@media screen and (min-width: 769px),print{html.theme--documenter-dark .level{display:flex}html.theme--documenter-dark .level>.level-item:not(.is-narrow){flex-grow:1}}html.theme--documenter-dark .level-item{align-items:center;display:flex;flex-basis:auto;flex-grow:0;flex-shrink:0;justify-content:center}html.theme--documenter-dark .level-item .title,html.theme--documenter-dark .level-item .subtitle{margin-bottom:0}@media screen and (max-width: 768px){html.theme--documenter-dark .level-item:not(:last-child){margin-bottom:.75rem}}html.theme--documenter-dark .level-left,html.theme--documenter-dark .level-right{flex-basis:auto;flex-grow:0;flex-shrink:0}html.theme--documenter-dark .level-left .level-item.is-flexible,html.theme--documenter-dark .level-right .level-item.is-flexible{flex-grow:1}@media screen and (min-width: 769px),print{html.theme--documenter-dark .level-left .level-item:not(:last-child),html.theme--documenter-dark .level-right .level-item:not(:last-child){margin-right:.75rem}}html.theme--documenter-dark .level-left{align-items:center;justify-content:flex-start}@media screen and (max-width: 768px){html.theme--documenter-dark .level-left+.level-right{margin-top:1.5rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .level-left{display:flex}}html.theme--documenter-dark .level-right{align-items:center;justify-content:flex-end}@media screen and (min-width: 769px),print{html.theme--documenter-dark .level-right{display:flex}}html.theme--documenter-dark .media{align-items:flex-start;display:flex;text-align:inherit}html.theme--documenter-dark .media .content:not(:last-child){margin-bottom:.75rem}html.theme--documenter-dark .media .media{border-top:1px solid rgba(94,109,111,0.5);display:flex;padding-top:.75rem}html.theme--documenter-dark .media .media .content:not(:last-child),html.theme--documenter-dark .media .media .control:not(:last-child){margin-bottom:.5rem}html.theme--documenter-dark .media .media .media{padding-top:.5rem}html.theme--documenter-dark .media .media .media+.media{margin-top:.5rem}html.theme--documenter-dark .media+.media{border-top:1px solid rgba(94,109,111,0.5);margin-top:1rem;padding-top:1rem}html.theme--documenter-dark .media.is-large+.media{margin-top:1.5rem;padding-top:1.5rem}html.theme--documenter-dark .media-left,html.theme--documenter-dark 
.media-right{flex-basis:auto;flex-grow:0;flex-shrink:0}html.theme--documenter-dark .media-left{margin-right:1rem}html.theme--documenter-dark .media-right{margin-left:1rem}html.theme--documenter-dark .media-content{flex-basis:auto;flex-grow:1;flex-shrink:1;text-align:inherit}@media screen and (max-width: 768px){html.theme--documenter-dark .media-content{overflow-x:auto}}html.theme--documenter-dark .menu{font-size:1rem}html.theme--documenter-dark .menu.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.menu{font-size:.75rem}html.theme--documenter-dark .menu.is-medium{font-size:1.25rem}html.theme--documenter-dark .menu.is-large{font-size:1.5rem}html.theme--documenter-dark .menu-list{line-height:1.25}html.theme--documenter-dark .menu-list a{border-radius:3px;color:#fff;display:block;padding:0.5em 0.75em}html.theme--documenter-dark .menu-list a:hover{background-color:#282f2f;color:#f2f2f2}html.theme--documenter-dark .menu-list a.is-active{background-color:#1abc9c;color:#fff}html.theme--documenter-dark .menu-list li ul{border-left:1px solid #5e6d6f;margin:.75em;padding-left:.75em}html.theme--documenter-dark .menu-label{color:#fff;font-size:.75em;letter-spacing:.1em;text-transform:uppercase}html.theme--documenter-dark .menu-label:not(:first-child){margin-top:1em}html.theme--documenter-dark .menu-label:not(:last-child){margin-bottom:1em}html.theme--documenter-dark .message{background-color:#282f2f;border-radius:.4em;font-size:1rem}html.theme--documenter-dark .message strong{color:currentColor}html.theme--documenter-dark .message a:not(.button):not(.tag):not(.dropdown-item){color:currentColor;text-decoration:underline}html.theme--documenter-dark .message.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.message{font-size:.75rem}html.theme--documenter-dark .message.is-medium{font-size:1.25rem}html.theme--documenter-dark .message.is-large{font-size:1.5rem}html.theme--documenter-dark .message.is-white{background-color:#fff}html.theme--documenter-dark .message.is-white .message-header{background-color:#fff;color:#0a0a0a}html.theme--documenter-dark .message.is-white .message-body{border-color:#fff}html.theme--documenter-dark .message.is-black{background-color:#fafafa}html.theme--documenter-dark .message.is-black .message-header{background-color:#0a0a0a;color:#fff}html.theme--documenter-dark .message.is-black .message-body{border-color:#0a0a0a}html.theme--documenter-dark .message.is-light{background-color:#f9fafb}html.theme--documenter-dark .message.is-light .message-header{background-color:#ecf0f1;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .message.is-light .message-body{border-color:#ecf0f1}html.theme--documenter-dark .message.is-dark,html.theme--documenter-dark .content kbd.message{background-color:#f9fafa}html.theme--documenter-dark .message.is-dark .message-header,html.theme--documenter-dark .content kbd.message .message-header{background-color:#282f2f;color:#fff}html.theme--documenter-dark .message.is-dark .message-body,html.theme--documenter-dark .content kbd.message .message-body{border-color:#282f2f}html.theme--documenter-dark .message.is-primary,html.theme--documenter-dark .docstring>section>a.message.docs-sourcelink{background-color:#f1f5f9}html.theme--documenter-dark .message.is-primary .message-header,html.theme--documenter-dark .docstring>section>a.message.docs-sourcelink .message-header{background-color:#375a7f;color:#fff}html.theme--documenter-dark .message.is-primary .message-body,html.theme--documenter-dark 
.docstring>section>a.message.docs-sourcelink .message-body{border-color:#375a7f;color:#4d7eb2}html.theme--documenter-dark .message.is-link{background-color:#edfdf9}html.theme--documenter-dark .message.is-link .message-header{background-color:#1abc9c;color:#fff}html.theme--documenter-dark .message.is-link .message-body{border-color:#1abc9c;color:#15987e}html.theme--documenter-dark .message.is-info{background-color:#ebf7ff}html.theme--documenter-dark .message.is-info .message-header{background-color:#024c7d;color:#fff}html.theme--documenter-dark .message.is-info .message-body{border-color:#024c7d;color:#0e9dfb}html.theme--documenter-dark .message.is-success{background-color:#ebfff3}html.theme--documenter-dark .message.is-success .message-header{background-color:#008438;color:#fff}html.theme--documenter-dark .message.is-success .message-body{border-color:#008438;color:#00eb64}html.theme--documenter-dark .message.is-warning{background-color:#fffaeb}html.theme--documenter-dark .message.is-warning .message-header{background-color:#ad8100;color:#fff}html.theme--documenter-dark .message.is-warning .message-body{border-color:#ad8100;color:#d19c00}html.theme--documenter-dark .message.is-danger{background-color:#fdeeec}html.theme--documenter-dark .message.is-danger .message-header{background-color:#9e1b0d;color:#fff}html.theme--documenter-dark .message.is-danger .message-body{border-color:#9e1b0d;color:#ec311d}html.theme--documenter-dark .message-header{align-items:center;background-color:#fff;border-radius:.4em .4em 0 0;color:rgba(0,0,0,0.7);display:flex;font-weight:700;justify-content:space-between;line-height:1.25;padding:0.75em 1em;position:relative}html.theme--documenter-dark .message-header .delete{flex-grow:0;flex-shrink:0;margin-left:.75em}html.theme--documenter-dark .message-header+.message-body{border-width:0;border-top-left-radius:0;border-top-right-radius:0}html.theme--documenter-dark .message-body{border-color:#5e6d6f;border-radius:.4em;border-style:solid;border-width:0 0 0 4px;color:#fff;padding:1.25em 1.5em}html.theme--documenter-dark .message-body code,html.theme--documenter-dark .message-body pre{background-color:#fff}html.theme--documenter-dark .message-body pre code{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .modal{align-items:center;display:none;flex-direction:column;justify-content:center;overflow:hidden;position:fixed;z-index:40}html.theme--documenter-dark .modal.is-active{display:flex}html.theme--documenter-dark .modal-background{background-color:rgba(10,10,10,0.86)}html.theme--documenter-dark .modal-content,html.theme--documenter-dark .modal-card{margin:0 20px;max-height:calc(100vh - 160px);overflow:auto;position:relative;width:100%}@media screen and (min-width: 769px){html.theme--documenter-dark .modal-content,html.theme--documenter-dark .modal-card{margin:0 auto;max-height:calc(100vh - 40px);width:640px}}html.theme--documenter-dark .modal-close{background:none;height:40px;position:fixed;right:20px;top:20px;width:40px}html.theme--documenter-dark .modal-card{display:flex;flex-direction:column;max-height:calc(100vh - 40px);overflow:hidden;-ms-overflow-y:visible}html.theme--documenter-dark .modal-card-head,html.theme--documenter-dark .modal-card-foot{align-items:center;background-color:#282f2f;display:flex;flex-shrink:0;justify-content:flex-start;padding:20px;position:relative}html.theme--documenter-dark .modal-card-head{border-bottom:1px solid #5e6d6f;border-top-left-radius:8px;border-top-right-radius:8px}html.theme--documenter-dark 
.modal-card-title{color:#f2f2f2;flex-grow:1;flex-shrink:0;font-size:1.5rem;line-height:1}html.theme--documenter-dark .modal-card-foot{border-bottom-left-radius:8px;border-bottom-right-radius:8px;border-top:1px solid #5e6d6f}html.theme--documenter-dark .modal-card-foot .button:not(:last-child){margin-right:.5em}html.theme--documenter-dark .modal-card-body{-webkit-overflow-scrolling:touch;background-color:#fff;flex-grow:1;flex-shrink:1;overflow:auto;padding:20px}html.theme--documenter-dark .navbar{background-color:#375a7f;min-height:4rem;position:relative;z-index:30}html.theme--documenter-dark .navbar.is-white{background-color:#fff;color:#0a0a0a}html.theme--documenter-dark .navbar.is-white .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-white .navbar-brand .navbar-link{color:#0a0a0a}html.theme--documenter-dark .navbar.is-white .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-white .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-white .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-white .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-white .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-white .navbar-brand .navbar-link.is-active{background-color:#f2f2f2;color:#0a0a0a}html.theme--documenter-dark .navbar.is-white .navbar-brand .navbar-link::after{border-color:#0a0a0a}html.theme--documenter-dark .navbar.is-white .navbar-burger{color:#0a0a0a}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar.is-white .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-white .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-white .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-white .navbar-end .navbar-link{color:#0a0a0a}html.theme--documenter-dark .navbar.is-white .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-white .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-white .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-white .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-white .navbar-start .navbar-link:hover,html.theme--documenter-dark .navbar.is-white .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-white .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-white .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-white .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-white .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-white .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-white .navbar-end .navbar-link.is-active{background-color:#f2f2f2;color:#0a0a0a}html.theme--documenter-dark .navbar.is-white .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-white .navbar-end .navbar-link::after{border-color:#0a0a0a}html.theme--documenter-dark .navbar.is-white .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-white .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-white .navbar-item.has-dropdown.is-active .navbar-link{background-color:#f2f2f2;color:#0a0a0a}html.theme--documenter-dark .navbar.is-white .navbar-dropdown a.navbar-item.is-active{background-color:#fff;color:#0a0a0a}}html.theme--documenter-dark .navbar.is-black{background-color:#0a0a0a;color:#fff}html.theme--documenter-dark 
.navbar.is-black .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-black .navbar-brand .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-black .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-black .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-black .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-black .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-black .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-black .navbar-brand .navbar-link.is-active{background-color:#000;color:#fff}html.theme--documenter-dark .navbar.is-black .navbar-brand .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-black .navbar-burger{color:#fff}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar.is-black .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-black .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-black .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-black .navbar-end .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-black .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-black .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-black .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-black .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-black .navbar-start .navbar-link:hover,html.theme--documenter-dark .navbar.is-black .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-black .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-black .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-black .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-black .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-black .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-black .navbar-end .navbar-link.is-active{background-color:#000;color:#fff}html.theme--documenter-dark .navbar.is-black .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-black .navbar-end .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-black .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-black .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-black .navbar-item.has-dropdown.is-active .navbar-link{background-color:#000;color:#fff}html.theme--documenter-dark .navbar.is-black .navbar-dropdown a.navbar-item.is-active{background-color:#0a0a0a;color:#fff}}html.theme--documenter-dark .navbar.is-light{background-color:#ecf0f1;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .navbar.is-light .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-light .navbar-brand .navbar-link{color:rgba(0,0,0,0.7)}html.theme--documenter-dark .navbar.is-light .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-light .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-light .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-light .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-light .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-light .navbar-brand .navbar-link.is-active{background-color:#dde4e6;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .navbar.is-light 
.navbar-brand .navbar-link::after{border-color:rgba(0,0,0,0.7)}html.theme--documenter-dark .navbar.is-light .navbar-burger{color:rgba(0,0,0,0.7)}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar.is-light .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-light .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-light .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-light .navbar-end .navbar-link{color:rgba(0,0,0,0.7)}html.theme--documenter-dark .navbar.is-light .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-light .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-light .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-light .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-light .navbar-start .navbar-link:hover,html.theme--documenter-dark .navbar.is-light .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-light .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-light .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-light .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-light .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-light .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-light .navbar-end .navbar-link.is-active{background-color:#dde4e6;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .navbar.is-light .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-light .navbar-end .navbar-link::after{border-color:rgba(0,0,0,0.7)}html.theme--documenter-dark .navbar.is-light .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-light .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-light .navbar-item.has-dropdown.is-active .navbar-link{background-color:#dde4e6;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .navbar.is-light .navbar-dropdown a.navbar-item.is-active{background-color:#ecf0f1;color:rgba(0,0,0,0.7)}}html.theme--documenter-dark .navbar.is-dark,html.theme--documenter-dark .content kbd.navbar{background-color:#282f2f;color:#fff}html.theme--documenter-dark .navbar.is-dark .navbar-brand>.navbar-item,html.theme--documenter-dark .content kbd.navbar .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-dark .navbar-brand .navbar-link,html.theme--documenter-dark .content kbd.navbar .navbar-brand .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-dark .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .content kbd.navbar .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-dark .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .content kbd.navbar .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-dark .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .content kbd.navbar .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-dark .navbar-brand .navbar-link:focus,html.theme--documenter-dark .content kbd.navbar .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-dark .navbar-brand .navbar-link:hover,html.theme--documenter-dark .content kbd.navbar .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-dark .navbar-brand .navbar-link.is-active,html.theme--documenter-dark .content kbd.navbar .navbar-brand 
.navbar-link.is-active{background-color:#1d2122;color:#fff}html.theme--documenter-dark .navbar.is-dark .navbar-brand .navbar-link::after,html.theme--documenter-dark .content kbd.navbar .navbar-brand .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-dark .navbar-burger,html.theme--documenter-dark .content kbd.navbar .navbar-burger{color:#fff}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar.is-dark .navbar-start>.navbar-item,html.theme--documenter-dark .content kbd.navbar .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-dark .navbar-start .navbar-link,html.theme--documenter-dark .content kbd.navbar .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-dark .navbar-end>.navbar-item,html.theme--documenter-dark .content kbd.navbar .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-dark .navbar-end .navbar-link,html.theme--documenter-dark .content kbd.navbar .navbar-end .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-dark .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .content kbd.navbar .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-dark .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .content kbd.navbar .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-dark .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .content kbd.navbar .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-dark .navbar-start .navbar-link:focus,html.theme--documenter-dark .content kbd.navbar .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-dark .navbar-start .navbar-link:hover,html.theme--documenter-dark .content kbd.navbar .navbar-start .navbar-link:hover,html.theme--documenter-dark .navbar.is-dark .navbar-start .navbar-link.is-active,html.theme--documenter-dark .content kbd.navbar .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-dark .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .content kbd.navbar .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-dark .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .content kbd.navbar .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-dark .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .content kbd.navbar .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-dark .navbar-end .navbar-link:focus,html.theme--documenter-dark .content kbd.navbar .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-dark .navbar-end .navbar-link:hover,html.theme--documenter-dark .content kbd.navbar .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-dark .navbar-end .navbar-link.is-active,html.theme--documenter-dark .content kbd.navbar .navbar-end .navbar-link.is-active{background-color:#1d2122;color:#fff}html.theme--documenter-dark .navbar.is-dark .navbar-start .navbar-link::after,html.theme--documenter-dark .content kbd.navbar .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-dark .navbar-end .navbar-link::after,html.theme--documenter-dark .content kbd.navbar .navbar-end .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-dark .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .content kbd.navbar .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-dark .navbar-item.has-dropdown:hover 
.navbar-link,html.theme--documenter-dark .content kbd.navbar .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-dark .navbar-item.has-dropdown.is-active .navbar-link,html.theme--documenter-dark .content kbd.navbar .navbar-item.has-dropdown.is-active .navbar-link{background-color:#1d2122;color:#fff}html.theme--documenter-dark .navbar.is-dark .navbar-dropdown a.navbar-item.is-active,html.theme--documenter-dark .content kbd.navbar .navbar-dropdown a.navbar-item.is-active{background-color:#282f2f;color:#fff}}html.theme--documenter-dark .navbar.is-primary,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink{background-color:#375a7f;color:#fff}html.theme--documenter-dark .navbar.is-primary .navbar-brand>.navbar-item,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-primary .navbar-brand .navbar-link,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-primary .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-primary .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-primary .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-primary .navbar-brand .navbar-link:focus,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-primary .navbar-brand .navbar-link:hover,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-primary .navbar-brand .navbar-link.is-active,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link.is-active{background-color:#2f4d6d;color:#fff}html.theme--documenter-dark .navbar.is-primary .navbar-brand .navbar-link::after,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-primary .navbar-burger,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-burger{color:#fff}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar.is-primary .navbar-start>.navbar-item,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-primary .navbar-start .navbar-link,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-primary .navbar-end>.navbar-item,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-primary .navbar-end .navbar-link,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-primary .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-primary 
.navbar-start>a.navbar-item:hover,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-primary .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-primary .navbar-start .navbar-link:focus,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-primary .navbar-start .navbar-link:hover,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-start .navbar-link:hover,html.theme--documenter-dark .navbar.is-primary .navbar-start .navbar-link.is-active,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-primary .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-primary .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-primary .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-primary .navbar-end .navbar-link:focus,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-primary .navbar-end .navbar-link:hover,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-primary .navbar-end .navbar-link.is-active,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link.is-active{background-color:#2f4d6d;color:#fff}html.theme--documenter-dark .navbar.is-primary .navbar-start .navbar-link::after,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-primary .navbar-end .navbar-link::after,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-primary .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-primary .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-primary .navbar-item.has-dropdown.is-active .navbar-link,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-item.has-dropdown.is-active .navbar-link{background-color:#2f4d6d;color:#fff}html.theme--documenter-dark .navbar.is-primary .navbar-dropdown a.navbar-item.is-active,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-dropdown a.navbar-item.is-active{background-color:#375a7f;color:#fff}}html.theme--documenter-dark .navbar.is-link{background-color:#1abc9c;color:#fff}html.theme--documenter-dark .navbar.is-link .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-link .navbar-brand 
.navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-link .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-link .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-link .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-link .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-link .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-link .navbar-brand .navbar-link.is-active{background-color:#17a689;color:#fff}html.theme--documenter-dark .navbar.is-link .navbar-brand .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-link .navbar-burger{color:#fff}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar.is-link .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-link .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-link .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-link .navbar-end .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-link .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-link .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-link .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-link .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-link .navbar-start .navbar-link:hover,html.theme--documenter-dark .navbar.is-link .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-link .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-link .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-link .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-link .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-link .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-link .navbar-end .navbar-link.is-active{background-color:#17a689;color:#fff}html.theme--documenter-dark .navbar.is-link .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-link .navbar-end .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-link .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-link .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-link .navbar-item.has-dropdown.is-active .navbar-link{background-color:#17a689;color:#fff}html.theme--documenter-dark .navbar.is-link .navbar-dropdown a.navbar-item.is-active{background-color:#1abc9c;color:#fff}}html.theme--documenter-dark .navbar.is-info{background-color:#024c7d;color:#fff}html.theme--documenter-dark .navbar.is-info .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-info .navbar-brand .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-info .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-info .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-info .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-info .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-info .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-info .navbar-brand .navbar-link.is-active{background-color:#023d64;color:#fff}html.theme--documenter-dark .navbar.is-info .navbar-brand .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-info .navbar-burger{color:#fff}@media screen and (min-width: 
1056px){html.theme--documenter-dark .navbar.is-info .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-info .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-info .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-info .navbar-end .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-info .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-info .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-info .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-info .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-info .navbar-start .navbar-link:hover,html.theme--documenter-dark .navbar.is-info .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-info .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-info .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-info .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-info .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-info .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-info .navbar-end .navbar-link.is-active{background-color:#023d64;color:#fff}html.theme--documenter-dark .navbar.is-info .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-info .navbar-end .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-info .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-info .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-info .navbar-item.has-dropdown.is-active .navbar-link{background-color:#023d64;color:#fff}html.theme--documenter-dark .navbar.is-info .navbar-dropdown a.navbar-item.is-active{background-color:#024c7d;color:#fff}}html.theme--documenter-dark .navbar.is-success{background-color:#008438;color:#fff}html.theme--documenter-dark .navbar.is-success .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-success .navbar-brand .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-success .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-success .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-success .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-success .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-success .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-success .navbar-brand .navbar-link.is-active{background-color:#006b2d;color:#fff}html.theme--documenter-dark .navbar.is-success .navbar-brand .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-success .navbar-burger{color:#fff}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar.is-success .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-success .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-success .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-success .navbar-end .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-success .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-success .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-success .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-success .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-success .navbar-start 
.navbar-link:hover,html.theme--documenter-dark .navbar.is-success .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-success .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-success .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-success .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-success .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-success .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-success .navbar-end .navbar-link.is-active{background-color:#006b2d;color:#fff}html.theme--documenter-dark .navbar.is-success .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-success .navbar-end .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-success .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-success .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-success .navbar-item.has-dropdown.is-active .navbar-link{background-color:#006b2d;color:#fff}html.theme--documenter-dark .navbar.is-success .navbar-dropdown a.navbar-item.is-active{background-color:#008438;color:#fff}}html.theme--documenter-dark .navbar.is-warning{background-color:#ad8100;color:#fff}html.theme--documenter-dark .navbar.is-warning .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-warning .navbar-brand .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-warning .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-warning .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-warning .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-warning .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-warning .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-warning .navbar-brand .navbar-link.is-active{background-color:#946e00;color:#fff}html.theme--documenter-dark .navbar.is-warning .navbar-brand .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-warning .navbar-burger{color:#fff}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar.is-warning .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-warning .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-warning .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-warning .navbar-end .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-warning .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-warning .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-warning .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-warning .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-warning .navbar-start .navbar-link:hover,html.theme--documenter-dark .navbar.is-warning .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-warning .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-warning .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-warning .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-warning .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-warning .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-warning .navbar-end .navbar-link.is-active{background-color:#946e00;color:#fff}html.theme--documenter-dark 
.navbar.is-warning .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-warning .navbar-end .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-warning .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-warning .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-warning .navbar-item.has-dropdown.is-active .navbar-link{background-color:#946e00;color:#fff}html.theme--documenter-dark .navbar.is-warning .navbar-dropdown a.navbar-item.is-active{background-color:#ad8100;color:#fff}}html.theme--documenter-dark .navbar.is-danger{background-color:#9e1b0d;color:#fff}html.theme--documenter-dark .navbar.is-danger .navbar-brand>.navbar-item,html.theme--documenter-dark .navbar.is-danger .navbar-brand .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-danger .navbar-brand>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-danger .navbar-brand>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-danger .navbar-brand>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-danger .navbar-brand .navbar-link:focus,html.theme--documenter-dark .navbar.is-danger .navbar-brand .navbar-link:hover,html.theme--documenter-dark .navbar.is-danger .navbar-brand .navbar-link.is-active{background-color:#86170b;color:#fff}html.theme--documenter-dark .navbar.is-danger .navbar-brand .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-danger .navbar-burger{color:#fff}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar.is-danger .navbar-start>.navbar-item,html.theme--documenter-dark .navbar.is-danger .navbar-start .navbar-link,html.theme--documenter-dark .navbar.is-danger .navbar-end>.navbar-item,html.theme--documenter-dark .navbar.is-danger .navbar-end .navbar-link{color:#fff}html.theme--documenter-dark .navbar.is-danger .navbar-start>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-danger .navbar-start>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-danger .navbar-start>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-danger .navbar-start .navbar-link:focus,html.theme--documenter-dark .navbar.is-danger .navbar-start .navbar-link:hover,html.theme--documenter-dark .navbar.is-danger .navbar-start .navbar-link.is-active,html.theme--documenter-dark .navbar.is-danger .navbar-end>a.navbar-item:focus,html.theme--documenter-dark .navbar.is-danger .navbar-end>a.navbar-item:hover,html.theme--documenter-dark .navbar.is-danger .navbar-end>a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-danger .navbar-end .navbar-link:focus,html.theme--documenter-dark .navbar.is-danger .navbar-end .navbar-link:hover,html.theme--documenter-dark .navbar.is-danger .navbar-end .navbar-link.is-active{background-color:#86170b;color:#fff}html.theme--documenter-dark .navbar.is-danger .navbar-start .navbar-link::after,html.theme--documenter-dark .navbar.is-danger .navbar-end .navbar-link::after{border-color:#fff}html.theme--documenter-dark .navbar.is-danger .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar.is-danger .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar.is-danger .navbar-item.has-dropdown.is-active .navbar-link{background-color:#86170b;color:#fff}html.theme--documenter-dark .navbar.is-danger .navbar-dropdown a.navbar-item.is-active{background-color:#9e1b0d;color:#fff}}html.theme--documenter-dark 
.navbar>.container{align-items:stretch;display:flex;min-height:4rem;width:100%}html.theme--documenter-dark .navbar.has-shadow{box-shadow:0 2px 0 0 #282f2f}html.theme--documenter-dark .navbar.is-fixed-bottom,html.theme--documenter-dark .navbar.is-fixed-top{left:0;position:fixed;right:0;z-index:30}html.theme--documenter-dark .navbar.is-fixed-bottom{bottom:0}html.theme--documenter-dark .navbar.is-fixed-bottom.has-shadow{box-shadow:0 -2px 0 0 #282f2f}html.theme--documenter-dark .navbar.is-fixed-top{top:0}html.theme--documenter-dark html.has-navbar-fixed-top,html.theme--documenter-dark body.has-navbar-fixed-top{padding-top:4rem}html.theme--documenter-dark html.has-navbar-fixed-bottom,html.theme--documenter-dark body.has-navbar-fixed-bottom{padding-bottom:4rem}html.theme--documenter-dark .navbar-brand,html.theme--documenter-dark .navbar-tabs{align-items:stretch;display:flex;flex-shrink:0;min-height:4rem}html.theme--documenter-dark .navbar-brand a.navbar-item:focus,html.theme--documenter-dark .navbar-brand a.navbar-item:hover{background-color:transparent}html.theme--documenter-dark .navbar-tabs{-webkit-overflow-scrolling:touch;max-width:100vw;overflow-x:auto;overflow-y:hidden}html.theme--documenter-dark .navbar-burger{color:#fff;-moz-appearance:none;-webkit-appearance:none;appearance:none;background:none;border:none;cursor:pointer;display:block;height:4rem;position:relative;width:4rem;margin-left:auto}html.theme--documenter-dark .navbar-burger span{background-color:currentColor;display:block;height:1px;left:calc(50% - 8px);position:absolute;transform-origin:center;transition-duration:86ms;transition-property:background-color, opacity, transform;transition-timing-function:ease-out;width:16px}html.theme--documenter-dark .navbar-burger span:nth-child(1){top:calc(50% - 6px)}html.theme--documenter-dark .navbar-burger span:nth-child(2){top:calc(50% - 1px)}html.theme--documenter-dark .navbar-burger span:nth-child(3){top:calc(50% + 4px)}html.theme--documenter-dark .navbar-burger:hover{background-color:rgba(0,0,0,0.05)}html.theme--documenter-dark .navbar-burger.is-active span:nth-child(1){transform:translateY(5px) rotate(45deg)}html.theme--documenter-dark .navbar-burger.is-active span:nth-child(2){opacity:0}html.theme--documenter-dark .navbar-burger.is-active span:nth-child(3){transform:translateY(-5px) rotate(-45deg)}html.theme--documenter-dark .navbar-menu{display:none}html.theme--documenter-dark .navbar-item,html.theme--documenter-dark .navbar-link{color:#fff;display:block;line-height:1.5;padding:0.5rem 0.75rem;position:relative}html.theme--documenter-dark .navbar-item .icon:only-child,html.theme--documenter-dark .navbar-link .icon:only-child{margin-left:-0.25rem;margin-right:-0.25rem}html.theme--documenter-dark a.navbar-item,html.theme--documenter-dark .navbar-link{cursor:pointer}html.theme--documenter-dark a.navbar-item:focus,html.theme--documenter-dark a.navbar-item:focus-within,html.theme--documenter-dark a.navbar-item:hover,html.theme--documenter-dark a.navbar-item.is-active,html.theme--documenter-dark .navbar-link:focus,html.theme--documenter-dark .navbar-link:focus-within,html.theme--documenter-dark .navbar-link:hover,html.theme--documenter-dark .navbar-link.is-active{background-color:rgba(0,0,0,0);color:#1abc9c}html.theme--documenter-dark .navbar-item{flex-grow:0;flex-shrink:0}html.theme--documenter-dark .navbar-item img{max-height:1.75rem}html.theme--documenter-dark .navbar-item.has-dropdown{padding:0}html.theme--documenter-dark 
.navbar-item.is-expanded{flex-grow:1;flex-shrink:1}html.theme--documenter-dark .navbar-item.is-tab{border-bottom:1px solid transparent;min-height:4rem;padding-bottom:calc(0.5rem - 1px)}html.theme--documenter-dark .navbar-item.is-tab:focus,html.theme--documenter-dark .navbar-item.is-tab:hover{background-color:rgba(0,0,0,0);border-bottom-color:#1abc9c}html.theme--documenter-dark .navbar-item.is-tab.is-active{background-color:rgba(0,0,0,0);border-bottom-color:#1abc9c;border-bottom-style:solid;border-bottom-width:3px;color:#1abc9c;padding-bottom:calc(0.5rem - 3px)}html.theme--documenter-dark .navbar-content{flex-grow:1;flex-shrink:1}html.theme--documenter-dark .navbar-link:not(.is-arrowless){padding-right:2.5em}html.theme--documenter-dark .navbar-link:not(.is-arrowless)::after{border-color:#fff;margin-top:-0.375em;right:1.125em}html.theme--documenter-dark .navbar-dropdown{font-size:0.875rem;padding-bottom:0.5rem;padding-top:0.5rem}html.theme--documenter-dark .navbar-dropdown .navbar-item{padding-left:1.5rem;padding-right:1.5rem}html.theme--documenter-dark .navbar-divider{background-color:rgba(0,0,0,0.2);border:none;display:none;height:2px;margin:0.5rem 0}@media screen and (max-width: 1055px){html.theme--documenter-dark .navbar>.container{display:block}html.theme--documenter-dark .navbar-brand .navbar-item,html.theme--documenter-dark .navbar-tabs .navbar-item{align-items:center;display:flex}html.theme--documenter-dark .navbar-link::after{display:none}html.theme--documenter-dark .navbar-menu{background-color:#375a7f;box-shadow:0 8px 16px rgba(10,10,10,0.1);padding:0.5rem 0}html.theme--documenter-dark .navbar-menu.is-active{display:block}html.theme--documenter-dark .navbar.is-fixed-bottom-touch,html.theme--documenter-dark .navbar.is-fixed-top-touch{left:0;position:fixed;right:0;z-index:30}html.theme--documenter-dark .navbar.is-fixed-bottom-touch{bottom:0}html.theme--documenter-dark .navbar.is-fixed-bottom-touch.has-shadow{box-shadow:0 -2px 3px rgba(10,10,10,0.1)}html.theme--documenter-dark .navbar.is-fixed-top-touch{top:0}html.theme--documenter-dark .navbar.is-fixed-top .navbar-menu,html.theme--documenter-dark .navbar.is-fixed-top-touch .navbar-menu{-webkit-overflow-scrolling:touch;max-height:calc(100vh - 4rem);overflow:auto}html.theme--documenter-dark html.has-navbar-fixed-top-touch,html.theme--documenter-dark body.has-navbar-fixed-top-touch{padding-top:4rem}html.theme--documenter-dark html.has-navbar-fixed-bottom-touch,html.theme--documenter-dark body.has-navbar-fixed-bottom-touch{padding-bottom:4rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .navbar,html.theme--documenter-dark .navbar-menu,html.theme--documenter-dark .navbar-start,html.theme--documenter-dark .navbar-end{align-items:stretch;display:flex}html.theme--documenter-dark .navbar{min-height:4rem}html.theme--documenter-dark .navbar.is-spaced{padding:1rem 2rem}html.theme--documenter-dark .navbar.is-spaced .navbar-start,html.theme--documenter-dark .navbar.is-spaced .navbar-end{align-items:center}html.theme--documenter-dark .navbar.is-spaced a.navbar-item,html.theme--documenter-dark .navbar.is-spaced .navbar-link{border-radius:.4em}html.theme--documenter-dark .navbar.is-transparent a.navbar-item:focus,html.theme--documenter-dark .navbar.is-transparent a.navbar-item:hover,html.theme--documenter-dark .navbar.is-transparent a.navbar-item.is-active,html.theme--documenter-dark .navbar.is-transparent .navbar-link:focus,html.theme--documenter-dark .navbar.is-transparent .navbar-link:hover,html.theme--documenter-dark 
.navbar.is-transparent .navbar-link.is-active{background-color:transparent !important}html.theme--documenter-dark .navbar.is-transparent .navbar-item.has-dropdown.is-active .navbar-link,html.theme--documenter-dark .navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus .navbar-link,html.theme--documenter-dark .navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus-within .navbar-link,html.theme--documenter-dark .navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:hover .navbar-link{background-color:transparent !important}html.theme--documenter-dark .navbar.is-transparent .navbar-dropdown a.navbar-item:focus,html.theme--documenter-dark .navbar.is-transparent .navbar-dropdown a.navbar-item:hover{background-color:rgba(0,0,0,0);color:#dbdee0}html.theme--documenter-dark .navbar.is-transparent .navbar-dropdown a.navbar-item.is-active{background-color:rgba(0,0,0,0);color:#1abc9c}html.theme--documenter-dark .navbar-burger{display:none}html.theme--documenter-dark .navbar-item,html.theme--documenter-dark .navbar-link{align-items:center;display:flex}html.theme--documenter-dark .navbar-item.has-dropdown{align-items:stretch}html.theme--documenter-dark .navbar-item.has-dropdown-up .navbar-link::after{transform:rotate(135deg) translate(0.25em, -0.25em)}html.theme--documenter-dark .navbar-item.has-dropdown-up .navbar-dropdown{border-bottom:1px solid rgba(0,0,0,0.2);border-radius:8px 8px 0 0;border-top:none;bottom:100%;box-shadow:0 -8px 8px rgba(10,10,10,0.1);top:auto}html.theme--documenter-dark .navbar-item.is-active .navbar-dropdown,html.theme--documenter-dark .navbar-item.is-hoverable:focus .navbar-dropdown,html.theme--documenter-dark .navbar-item.is-hoverable:focus-within .navbar-dropdown,html.theme--documenter-dark .navbar-item.is-hoverable:hover .navbar-dropdown{display:block}.navbar.is-spaced html.theme--documenter-dark .navbar-item.is-active .navbar-dropdown,html.theme--documenter-dark .navbar-item.is-active .navbar-dropdown.is-boxed,.navbar.is-spaced html.theme--documenter-dark .navbar-item.is-hoverable:focus .navbar-dropdown,html.theme--documenter-dark .navbar-item.is-hoverable:focus .navbar-dropdown.is-boxed,.navbar.is-spaced html.theme--documenter-dark .navbar-item.is-hoverable:focus-within .navbar-dropdown,html.theme--documenter-dark .navbar-item.is-hoverable:focus-within .navbar-dropdown.is-boxed,.navbar.is-spaced html.theme--documenter-dark .navbar-item.is-hoverable:hover .navbar-dropdown,html.theme--documenter-dark .navbar-item.is-hoverable:hover .navbar-dropdown.is-boxed{opacity:1;pointer-events:auto;transform:translateY(0)}html.theme--documenter-dark .navbar-menu{flex-grow:1;flex-shrink:0}html.theme--documenter-dark .navbar-start{justify-content:flex-start;margin-right:auto}html.theme--documenter-dark .navbar-end{justify-content:flex-end;margin-left:auto}html.theme--documenter-dark .navbar-dropdown{background-color:#375a7f;border-bottom-left-radius:8px;border-bottom-right-radius:8px;border-top:1px solid rgba(0,0,0,0.2);box-shadow:0 8px 8px rgba(10,10,10,0.1);display:none;font-size:0.875rem;left:0;min-width:100%;position:absolute;top:100%;z-index:20}html.theme--documenter-dark .navbar-dropdown .navbar-item{padding:0.375rem 1rem;white-space:nowrap}html.theme--documenter-dark .navbar-dropdown a.navbar-item{padding-right:3rem}html.theme--documenter-dark .navbar-dropdown a.navbar-item:focus,html.theme--documenter-dark .navbar-dropdown a.navbar-item:hover{background-color:rgba(0,0,0,0);color:#dbdee0}html.theme--documenter-dark .navbar-dropdown 
a.navbar-item.is-active{background-color:rgba(0,0,0,0);color:#1abc9c}.navbar.is-spaced html.theme--documenter-dark .navbar-dropdown,html.theme--documenter-dark .navbar-dropdown.is-boxed{border-radius:8px;border-top:none;box-shadow:0 8px 8px rgba(10,10,10,0.1), 0 0 0 1px rgba(10,10,10,0.1);display:block;opacity:0;pointer-events:none;top:calc(100% + (-4px));transform:translateY(-5px);transition-duration:86ms;transition-property:opacity, transform}html.theme--documenter-dark .navbar-dropdown.is-right{left:auto;right:0}html.theme--documenter-dark .navbar-divider{display:block}html.theme--documenter-dark .navbar>.container .navbar-brand,html.theme--documenter-dark .container>.navbar .navbar-brand{margin-left:-.75rem}html.theme--documenter-dark .navbar>.container .navbar-menu,html.theme--documenter-dark .container>.navbar .navbar-menu{margin-right:-.75rem}html.theme--documenter-dark .navbar.is-fixed-bottom-desktop,html.theme--documenter-dark .navbar.is-fixed-top-desktop{left:0;position:fixed;right:0;z-index:30}html.theme--documenter-dark .navbar.is-fixed-bottom-desktop{bottom:0}html.theme--documenter-dark .navbar.is-fixed-bottom-desktop.has-shadow{box-shadow:0 -2px 3px rgba(10,10,10,0.1)}html.theme--documenter-dark .navbar.is-fixed-top-desktop{top:0}html.theme--documenter-dark html.has-navbar-fixed-top-desktop,html.theme--documenter-dark body.has-navbar-fixed-top-desktop{padding-top:4rem}html.theme--documenter-dark html.has-navbar-fixed-bottom-desktop,html.theme--documenter-dark body.has-navbar-fixed-bottom-desktop{padding-bottom:4rem}html.theme--documenter-dark html.has-spaced-navbar-fixed-top,html.theme--documenter-dark body.has-spaced-navbar-fixed-top{padding-top:6rem}html.theme--documenter-dark html.has-spaced-navbar-fixed-bottom,html.theme--documenter-dark body.has-spaced-navbar-fixed-bottom{padding-bottom:6rem}html.theme--documenter-dark a.navbar-item.is-active,html.theme--documenter-dark .navbar-link.is-active{color:#1abc9c}html.theme--documenter-dark a.navbar-item.is-active:not(:focus):not(:hover),html.theme--documenter-dark .navbar-link.is-active:not(:focus):not(:hover){background-color:rgba(0,0,0,0)}html.theme--documenter-dark .navbar-item.has-dropdown:focus .navbar-link,html.theme--documenter-dark .navbar-item.has-dropdown:hover .navbar-link,html.theme--documenter-dark .navbar-item.has-dropdown.is-active .navbar-link{background-color:rgba(0,0,0,0)}}html.theme--documenter-dark .hero.is-fullheight-with-navbar{min-height:calc(100vh - 4rem)}html.theme--documenter-dark .pagination{font-size:1rem;margin:-.25rem}html.theme--documenter-dark .pagination.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.pagination{font-size:.75rem}html.theme--documenter-dark .pagination.is-medium{font-size:1.25rem}html.theme--documenter-dark .pagination.is-large{font-size:1.5rem}html.theme--documenter-dark .pagination.is-rounded .pagination-previous,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.pagination .pagination-previous,html.theme--documenter-dark .pagination.is-rounded .pagination-next,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.pagination .pagination-next{padding-left:1em;padding-right:1em;border-radius:9999px}html.theme--documenter-dark .pagination.is-rounded .pagination-link,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.pagination .pagination-link{border-radius:9999px}html.theme--documenter-dark .pagination,html.theme--documenter-dark 
.pagination-list{align-items:center;display:flex;justify-content:center;text-align:center}html.theme--documenter-dark .pagination-previous,html.theme--documenter-dark .pagination-next,html.theme--documenter-dark .pagination-link,html.theme--documenter-dark .pagination-ellipsis{font-size:1em;justify-content:center;margin:.25rem;padding-left:.5em;padding-right:.5em;text-align:center}html.theme--documenter-dark .pagination-previous,html.theme--documenter-dark .pagination-next,html.theme--documenter-dark .pagination-link{border-color:#5e6d6f;color:#1abc9c;min-width:2.5em}html.theme--documenter-dark .pagination-previous:hover,html.theme--documenter-dark .pagination-next:hover,html.theme--documenter-dark .pagination-link:hover{border-color:#8c9b9d;color:#1dd2af}html.theme--documenter-dark .pagination-previous:focus,html.theme--documenter-dark .pagination-next:focus,html.theme--documenter-dark .pagination-link:focus{border-color:#8c9b9d}html.theme--documenter-dark .pagination-previous:active,html.theme--documenter-dark .pagination-next:active,html.theme--documenter-dark .pagination-link:active{box-shadow:inset 0 1px 2px rgba(10,10,10,0.2)}html.theme--documenter-dark .pagination-previous[disabled],html.theme--documenter-dark .pagination-previous.is-disabled,html.theme--documenter-dark .pagination-next[disabled],html.theme--documenter-dark .pagination-next.is-disabled,html.theme--documenter-dark .pagination-link[disabled],html.theme--documenter-dark .pagination-link.is-disabled{background-color:#5e6d6f;border-color:#5e6d6f;box-shadow:none;color:#fff;opacity:0.5}html.theme--documenter-dark .pagination-previous,html.theme--documenter-dark .pagination-next{padding-left:.75em;padding-right:.75em;white-space:nowrap}html.theme--documenter-dark .pagination-link.is-current{background-color:#1abc9c;border-color:#1abc9c;color:#fff}html.theme--documenter-dark .pagination-ellipsis{color:#8c9b9d;pointer-events:none}html.theme--documenter-dark .pagination-list{flex-wrap:wrap}html.theme--documenter-dark .pagination-list li{list-style:none}@media screen and (max-width: 768px){html.theme--documenter-dark .pagination{flex-wrap:wrap}html.theme--documenter-dark .pagination-previous,html.theme--documenter-dark .pagination-next{flex-grow:1;flex-shrink:1}html.theme--documenter-dark .pagination-list li{flex-grow:1;flex-shrink:1}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .pagination-list{flex-grow:1;flex-shrink:1;justify-content:flex-start;order:1}html.theme--documenter-dark .pagination-previous,html.theme--documenter-dark .pagination-next,html.theme--documenter-dark .pagination-link,html.theme--documenter-dark .pagination-ellipsis{margin-bottom:0;margin-top:0}html.theme--documenter-dark .pagination-previous{order:2}html.theme--documenter-dark .pagination-next{order:3}html.theme--documenter-dark .pagination{justify-content:space-between;margin-bottom:0;margin-top:0}html.theme--documenter-dark .pagination.is-centered .pagination-previous{order:1}html.theme--documenter-dark .pagination.is-centered .pagination-list{justify-content:center;order:2}html.theme--documenter-dark .pagination.is-centered .pagination-next{order:3}html.theme--documenter-dark .pagination.is-right .pagination-previous{order:1}html.theme--documenter-dark .pagination.is-right .pagination-next{order:2}html.theme--documenter-dark .pagination.is-right .pagination-list{justify-content:flex-end;order:3}}html.theme--documenter-dark .panel{border-radius:8px;box-shadow:#171717;font-size:1rem}html.theme--documenter-dark 
.panel:not(:last-child){margin-bottom:1.5rem}html.theme--documenter-dark .panel.is-white .panel-heading{background-color:#fff;color:#0a0a0a}html.theme--documenter-dark .panel.is-white .panel-tabs a.is-active{border-bottom-color:#fff}html.theme--documenter-dark .panel.is-white .panel-block.is-active .panel-icon{color:#fff}html.theme--documenter-dark .panel.is-black .panel-heading{background-color:#0a0a0a;color:#fff}html.theme--documenter-dark .panel.is-black .panel-tabs a.is-active{border-bottom-color:#0a0a0a}html.theme--documenter-dark .panel.is-black .panel-block.is-active .panel-icon{color:#0a0a0a}html.theme--documenter-dark .panel.is-light .panel-heading{background-color:#ecf0f1;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .panel.is-light .panel-tabs a.is-active{border-bottom-color:#ecf0f1}html.theme--documenter-dark .panel.is-light .panel-block.is-active .panel-icon{color:#ecf0f1}html.theme--documenter-dark .panel.is-dark .panel-heading,html.theme--documenter-dark .content kbd.panel .panel-heading{background-color:#282f2f;color:#fff}html.theme--documenter-dark .panel.is-dark .panel-tabs a.is-active,html.theme--documenter-dark .content kbd.panel .panel-tabs a.is-active{border-bottom-color:#282f2f}html.theme--documenter-dark .panel.is-dark .panel-block.is-active .panel-icon,html.theme--documenter-dark .content kbd.panel .panel-block.is-active .panel-icon{color:#282f2f}html.theme--documenter-dark .panel.is-primary .panel-heading,html.theme--documenter-dark .docstring>section>a.panel.docs-sourcelink .panel-heading{background-color:#375a7f;color:#fff}html.theme--documenter-dark .panel.is-primary .panel-tabs a.is-active,html.theme--documenter-dark .docstring>section>a.panel.docs-sourcelink .panel-tabs a.is-active{border-bottom-color:#375a7f}html.theme--documenter-dark .panel.is-primary .panel-block.is-active .panel-icon,html.theme--documenter-dark .docstring>section>a.panel.docs-sourcelink .panel-block.is-active .panel-icon{color:#375a7f}html.theme--documenter-dark .panel.is-link .panel-heading{background-color:#1abc9c;color:#fff}html.theme--documenter-dark .panel.is-link .panel-tabs a.is-active{border-bottom-color:#1abc9c}html.theme--documenter-dark .panel.is-link .panel-block.is-active .panel-icon{color:#1abc9c}html.theme--documenter-dark .panel.is-info .panel-heading{background-color:#024c7d;color:#fff}html.theme--documenter-dark .panel.is-info .panel-tabs a.is-active{border-bottom-color:#024c7d}html.theme--documenter-dark .panel.is-info .panel-block.is-active .panel-icon{color:#024c7d}html.theme--documenter-dark .panel.is-success .panel-heading{background-color:#008438;color:#fff}html.theme--documenter-dark .panel.is-success .panel-tabs a.is-active{border-bottom-color:#008438}html.theme--documenter-dark .panel.is-success .panel-block.is-active .panel-icon{color:#008438}html.theme--documenter-dark .panel.is-warning .panel-heading{background-color:#ad8100;color:#fff}html.theme--documenter-dark .panel.is-warning .panel-tabs a.is-active{border-bottom-color:#ad8100}html.theme--documenter-dark .panel.is-warning .panel-block.is-active .panel-icon{color:#ad8100}html.theme--documenter-dark .panel.is-danger .panel-heading{background-color:#9e1b0d;color:#fff}html.theme--documenter-dark .panel.is-danger .panel-tabs a.is-active{border-bottom-color:#9e1b0d}html.theme--documenter-dark .panel.is-danger .panel-block.is-active .panel-icon{color:#9e1b0d}html.theme--documenter-dark .panel-tabs:not(:last-child),html.theme--documenter-dark .panel-block:not(:last-child){border-bottom:1px solid 
#ededed}html.theme--documenter-dark .panel-heading{background-color:#343c3d;border-radius:8px 8px 0 0;color:#f2f2f2;font-size:1.25em;font-weight:700;line-height:1.25;padding:0.75em 1em}html.theme--documenter-dark .panel-tabs{align-items:flex-end;display:flex;font-size:.875em;justify-content:center}html.theme--documenter-dark .panel-tabs a{border-bottom:1px solid #5e6d6f;margin-bottom:-1px;padding:0.5em}html.theme--documenter-dark .panel-tabs a.is-active{border-bottom-color:#343c3d;color:#17a689}html.theme--documenter-dark .panel-list a{color:#fff}html.theme--documenter-dark .panel-list a:hover{color:#1abc9c}html.theme--documenter-dark .panel-block{align-items:center;color:#f2f2f2;display:flex;justify-content:flex-start;padding:0.5em 0.75em}html.theme--documenter-dark .panel-block input[type="checkbox"]{margin-right:.75em}html.theme--documenter-dark .panel-block>.control{flex-grow:1;flex-shrink:1;width:100%}html.theme--documenter-dark .panel-block.is-wrapped{flex-wrap:wrap}html.theme--documenter-dark .panel-block.is-active{border-left-color:#1abc9c;color:#17a689}html.theme--documenter-dark .panel-block.is-active .panel-icon{color:#1abc9c}html.theme--documenter-dark .panel-block:last-child{border-bottom-left-radius:8px;border-bottom-right-radius:8px}html.theme--documenter-dark a.panel-block,html.theme--documenter-dark label.panel-block{cursor:pointer}html.theme--documenter-dark a.panel-block:hover,html.theme--documenter-dark label.panel-block:hover{background-color:#282f2f}html.theme--documenter-dark .panel-icon{display:inline-block;font-size:14px;height:1em;line-height:1em;text-align:center;vertical-align:top;width:1em;color:#fff;margin-right:.75em}html.theme--documenter-dark .panel-icon .fa{font-size:inherit;line-height:inherit}html.theme--documenter-dark .tabs{-webkit-overflow-scrolling:touch;align-items:stretch;display:flex;font-size:1rem;justify-content:space-between;overflow:hidden;overflow-x:auto;white-space:nowrap}html.theme--documenter-dark .tabs a{align-items:center;border-bottom-color:#5e6d6f;border-bottom-style:solid;border-bottom-width:1px;color:#fff;display:flex;justify-content:center;margin-bottom:-1px;padding:0.5em 1em;vertical-align:top}html.theme--documenter-dark .tabs a:hover{border-bottom-color:#f2f2f2;color:#f2f2f2}html.theme--documenter-dark .tabs li{display:block}html.theme--documenter-dark .tabs li.is-active a{border-bottom-color:#1abc9c;color:#1abc9c}html.theme--documenter-dark .tabs ul{align-items:center;border-bottom-color:#5e6d6f;border-bottom-style:solid;border-bottom-width:1px;display:flex;flex-grow:1;flex-shrink:0;justify-content:flex-start}html.theme--documenter-dark .tabs ul.is-left{padding-right:0.75em}html.theme--documenter-dark .tabs ul.is-center{flex:none;justify-content:center;padding-left:0.75em;padding-right:0.75em}html.theme--documenter-dark .tabs ul.is-right{justify-content:flex-end;padding-left:0.75em}html.theme--documenter-dark .tabs .icon:first-child{margin-right:.5em}html.theme--documenter-dark .tabs .icon:last-child{margin-left:.5em}html.theme--documenter-dark .tabs.is-centered ul{justify-content:center}html.theme--documenter-dark .tabs.is-right ul{justify-content:flex-end}html.theme--documenter-dark .tabs.is-boxed a{border:1px solid transparent;border-radius:.4em .4em 0 0}html.theme--documenter-dark .tabs.is-boxed a:hover{background-color:#282f2f;border-bottom-color:#5e6d6f}html.theme--documenter-dark .tabs.is-boxed li.is-active a{background-color:#fff;border-color:#5e6d6f;border-bottom-color:rgba(0,0,0,0) !important}html.theme--documenter-dark 
.tabs.is-fullwidth li{flex-grow:1;flex-shrink:0}html.theme--documenter-dark .tabs.is-toggle a{border-color:#5e6d6f;border-style:solid;border-width:1px;margin-bottom:0;position:relative}html.theme--documenter-dark .tabs.is-toggle a:hover{background-color:#282f2f;border-color:#8c9b9d;z-index:2}html.theme--documenter-dark .tabs.is-toggle li+li{margin-left:-1px}html.theme--documenter-dark .tabs.is-toggle li:first-child a{border-top-left-radius:.4em;border-bottom-left-radius:.4em}html.theme--documenter-dark .tabs.is-toggle li:last-child a{border-top-right-radius:.4em;border-bottom-right-radius:.4em}html.theme--documenter-dark .tabs.is-toggle li.is-active a{background-color:#1abc9c;border-color:#1abc9c;color:#fff;z-index:1}html.theme--documenter-dark .tabs.is-toggle ul{border-bottom:none}html.theme--documenter-dark .tabs.is-toggle.is-toggle-rounded li:first-child a{border-bottom-left-radius:9999px;border-top-left-radius:9999px;padding-left:1.25em}html.theme--documenter-dark .tabs.is-toggle.is-toggle-rounded li:last-child a{border-bottom-right-radius:9999px;border-top-right-radius:9999px;padding-right:1.25em}html.theme--documenter-dark .tabs.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.tabs{font-size:.75rem}html.theme--documenter-dark .tabs.is-medium{font-size:1.25rem}html.theme--documenter-dark .tabs.is-large{font-size:1.5rem}html.theme--documenter-dark .column{display:block;flex-basis:0;flex-grow:1;flex-shrink:1;padding:.75rem}.columns.is-mobile>html.theme--documenter-dark .column.is-narrow{flex:none;width:unset}.columns.is-mobile>html.theme--documenter-dark .column.is-full{flex:none;width:100%}.columns.is-mobile>html.theme--documenter-dark .column.is-three-quarters{flex:none;width:75%}.columns.is-mobile>html.theme--documenter-dark .column.is-two-thirds{flex:none;width:66.6666%}.columns.is-mobile>html.theme--documenter-dark .column.is-half{flex:none;width:50%}.columns.is-mobile>html.theme--documenter-dark .column.is-one-third{flex:none;width:33.3333%}.columns.is-mobile>html.theme--documenter-dark .column.is-one-quarter{flex:none;width:25%}.columns.is-mobile>html.theme--documenter-dark .column.is-one-fifth{flex:none;width:20%}.columns.is-mobile>html.theme--documenter-dark .column.is-two-fifths{flex:none;width:40%}.columns.is-mobile>html.theme--documenter-dark .column.is-three-fifths{flex:none;width:60%}.columns.is-mobile>html.theme--documenter-dark .column.is-four-fifths{flex:none;width:80%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-three-quarters{margin-left:75%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-two-thirds{margin-left:66.6666%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-half{margin-left:50%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-one-third{margin-left:33.3333%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-one-quarter{margin-left:25%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-one-fifth{margin-left:20%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-two-fifths{margin-left:40%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-three-fifths{margin-left:60%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-four-fifths{margin-left:80%}.columns.is-mobile>html.theme--documenter-dark .column.is-0{flex:none;width:0%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-0{margin-left:0%}.columns.is-mobile>html.theme--documenter-dark 
.column.is-1{flex:none;width:8.33333337%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-1{margin-left:8.33333337%}.columns.is-mobile>html.theme--documenter-dark .column.is-2{flex:none;width:16.66666674%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-2{margin-left:16.66666674%}.columns.is-mobile>html.theme--documenter-dark .column.is-3{flex:none;width:25%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-3{margin-left:25%}.columns.is-mobile>html.theme--documenter-dark .column.is-4{flex:none;width:33.33333337%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-4{margin-left:33.33333337%}.columns.is-mobile>html.theme--documenter-dark .column.is-5{flex:none;width:41.66666674%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-5{margin-left:41.66666674%}.columns.is-mobile>html.theme--documenter-dark .column.is-6{flex:none;width:50%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-6{margin-left:50%}.columns.is-mobile>html.theme--documenter-dark .column.is-7{flex:none;width:58.33333337%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-7{margin-left:58.33333337%}.columns.is-mobile>html.theme--documenter-dark .column.is-8{flex:none;width:66.66666674%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-8{margin-left:66.66666674%}.columns.is-mobile>html.theme--documenter-dark .column.is-9{flex:none;width:75%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-9{margin-left:75%}.columns.is-mobile>html.theme--documenter-dark .column.is-10{flex:none;width:83.33333337%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-10{margin-left:83.33333337%}.columns.is-mobile>html.theme--documenter-dark .column.is-11{flex:none;width:91.66666674%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-11{margin-left:91.66666674%}.columns.is-mobile>html.theme--documenter-dark .column.is-12{flex:none;width:100%}.columns.is-mobile>html.theme--documenter-dark .column.is-offset-12{margin-left:100%}@media screen and (max-width: 768px){html.theme--documenter-dark .column.is-narrow-mobile{flex:none;width:unset}html.theme--documenter-dark .column.is-full-mobile{flex:none;width:100%}html.theme--documenter-dark .column.is-three-quarters-mobile{flex:none;width:75%}html.theme--documenter-dark .column.is-two-thirds-mobile{flex:none;width:66.6666%}html.theme--documenter-dark .column.is-half-mobile{flex:none;width:50%}html.theme--documenter-dark .column.is-one-third-mobile{flex:none;width:33.3333%}html.theme--documenter-dark .column.is-one-quarter-mobile{flex:none;width:25%}html.theme--documenter-dark .column.is-one-fifth-mobile{flex:none;width:20%}html.theme--documenter-dark .column.is-two-fifths-mobile{flex:none;width:40%}html.theme--documenter-dark .column.is-three-fifths-mobile{flex:none;width:60%}html.theme--documenter-dark .column.is-four-fifths-mobile{flex:none;width:80%}html.theme--documenter-dark .column.is-offset-three-quarters-mobile{margin-left:75%}html.theme--documenter-dark .column.is-offset-two-thirds-mobile{margin-left:66.6666%}html.theme--documenter-dark .column.is-offset-half-mobile{margin-left:50%}html.theme--documenter-dark .column.is-offset-one-third-mobile{margin-left:33.3333%}html.theme--documenter-dark .column.is-offset-one-quarter-mobile{margin-left:25%}html.theme--documenter-dark .column.is-offset-one-fifth-mobile{margin-left:20%}html.theme--documenter-dark .column.is-offset-two-fifths-mobile{margin-left:40%}html.theme--documenter-dark 
.column.is-offset-three-fifths-mobile{margin-left:60%}html.theme--documenter-dark .column.is-offset-four-fifths-mobile{margin-left:80%}html.theme--documenter-dark .column.is-0-mobile{flex:none;width:0%}html.theme--documenter-dark .column.is-offset-0-mobile{margin-left:0%}html.theme--documenter-dark .column.is-1-mobile{flex:none;width:8.33333337%}html.theme--documenter-dark .column.is-offset-1-mobile{margin-left:8.33333337%}html.theme--documenter-dark .column.is-2-mobile{flex:none;width:16.66666674%}html.theme--documenter-dark .column.is-offset-2-mobile{margin-left:16.66666674%}html.theme--documenter-dark .column.is-3-mobile{flex:none;width:25%}html.theme--documenter-dark .column.is-offset-3-mobile{margin-left:25%}html.theme--documenter-dark .column.is-4-mobile{flex:none;width:33.33333337%}html.theme--documenter-dark .column.is-offset-4-mobile{margin-left:33.33333337%}html.theme--documenter-dark .column.is-5-mobile{flex:none;width:41.66666674%}html.theme--documenter-dark .column.is-offset-5-mobile{margin-left:41.66666674%}html.theme--documenter-dark .column.is-6-mobile{flex:none;width:50%}html.theme--documenter-dark .column.is-offset-6-mobile{margin-left:50%}html.theme--documenter-dark .column.is-7-mobile{flex:none;width:58.33333337%}html.theme--documenter-dark .column.is-offset-7-mobile{margin-left:58.33333337%}html.theme--documenter-dark .column.is-8-mobile{flex:none;width:66.66666674%}html.theme--documenter-dark .column.is-offset-8-mobile{margin-left:66.66666674%}html.theme--documenter-dark .column.is-9-mobile{flex:none;width:75%}html.theme--documenter-dark .column.is-offset-9-mobile{margin-left:75%}html.theme--documenter-dark .column.is-10-mobile{flex:none;width:83.33333337%}html.theme--documenter-dark .column.is-offset-10-mobile{margin-left:83.33333337%}html.theme--documenter-dark .column.is-11-mobile{flex:none;width:91.66666674%}html.theme--documenter-dark .column.is-offset-11-mobile{margin-left:91.66666674%}html.theme--documenter-dark .column.is-12-mobile{flex:none;width:100%}html.theme--documenter-dark .column.is-offset-12-mobile{margin-left:100%}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .column.is-narrow,html.theme--documenter-dark .column.is-narrow-tablet{flex:none;width:unset}html.theme--documenter-dark .column.is-full,html.theme--documenter-dark .column.is-full-tablet{flex:none;width:100%}html.theme--documenter-dark .column.is-three-quarters,html.theme--documenter-dark .column.is-three-quarters-tablet{flex:none;width:75%}html.theme--documenter-dark .column.is-two-thirds,html.theme--documenter-dark .column.is-two-thirds-tablet{flex:none;width:66.6666%}html.theme--documenter-dark .column.is-half,html.theme--documenter-dark .column.is-half-tablet{flex:none;width:50%}html.theme--documenter-dark .column.is-one-third,html.theme--documenter-dark .column.is-one-third-tablet{flex:none;width:33.3333%}html.theme--documenter-dark .column.is-one-quarter,html.theme--documenter-dark .column.is-one-quarter-tablet{flex:none;width:25%}html.theme--documenter-dark .column.is-one-fifth,html.theme--documenter-dark .column.is-one-fifth-tablet{flex:none;width:20%}html.theme--documenter-dark .column.is-two-fifths,html.theme--documenter-dark .column.is-two-fifths-tablet{flex:none;width:40%}html.theme--documenter-dark .column.is-three-fifths,html.theme--documenter-dark .column.is-three-fifths-tablet{flex:none;width:60%}html.theme--documenter-dark .column.is-four-fifths,html.theme--documenter-dark .column.is-four-fifths-tablet{flex:none;width:80%}html.theme--documenter-dark 
.column.is-offset-three-quarters,html.theme--documenter-dark .column.is-offset-three-quarters-tablet{margin-left:75%}html.theme--documenter-dark .column.is-offset-two-thirds,html.theme--documenter-dark .column.is-offset-two-thirds-tablet{margin-left:66.6666%}html.theme--documenter-dark .column.is-offset-half,html.theme--documenter-dark .column.is-offset-half-tablet{margin-left:50%}html.theme--documenter-dark .column.is-offset-one-third,html.theme--documenter-dark .column.is-offset-one-third-tablet{margin-left:33.3333%}html.theme--documenter-dark .column.is-offset-one-quarter,html.theme--documenter-dark .column.is-offset-one-quarter-tablet{margin-left:25%}html.theme--documenter-dark .column.is-offset-one-fifth,html.theme--documenter-dark .column.is-offset-one-fifth-tablet{margin-left:20%}html.theme--documenter-dark .column.is-offset-two-fifths,html.theme--documenter-dark .column.is-offset-two-fifths-tablet{margin-left:40%}html.theme--documenter-dark .column.is-offset-three-fifths,html.theme--documenter-dark .column.is-offset-three-fifths-tablet{margin-left:60%}html.theme--documenter-dark .column.is-offset-four-fifths,html.theme--documenter-dark .column.is-offset-four-fifths-tablet{margin-left:80%}html.theme--documenter-dark .column.is-0,html.theme--documenter-dark .column.is-0-tablet{flex:none;width:0%}html.theme--documenter-dark .column.is-offset-0,html.theme--documenter-dark .column.is-offset-0-tablet{margin-left:0%}html.theme--documenter-dark .column.is-1,html.theme--documenter-dark .column.is-1-tablet{flex:none;width:8.33333337%}html.theme--documenter-dark .column.is-offset-1,html.theme--documenter-dark .column.is-offset-1-tablet{margin-left:8.33333337%}html.theme--documenter-dark .column.is-2,html.theme--documenter-dark .column.is-2-tablet{flex:none;width:16.66666674%}html.theme--documenter-dark .column.is-offset-2,html.theme--documenter-dark .column.is-offset-2-tablet{margin-left:16.66666674%}html.theme--documenter-dark .column.is-3,html.theme--documenter-dark .column.is-3-tablet{flex:none;width:25%}html.theme--documenter-dark .column.is-offset-3,html.theme--documenter-dark .column.is-offset-3-tablet{margin-left:25%}html.theme--documenter-dark .column.is-4,html.theme--documenter-dark .column.is-4-tablet{flex:none;width:33.33333337%}html.theme--documenter-dark .column.is-offset-4,html.theme--documenter-dark .column.is-offset-4-tablet{margin-left:33.33333337%}html.theme--documenter-dark .column.is-5,html.theme--documenter-dark .column.is-5-tablet{flex:none;width:41.66666674%}html.theme--documenter-dark .column.is-offset-5,html.theme--documenter-dark .column.is-offset-5-tablet{margin-left:41.66666674%}html.theme--documenter-dark .column.is-6,html.theme--documenter-dark .column.is-6-tablet{flex:none;width:50%}html.theme--documenter-dark .column.is-offset-6,html.theme--documenter-dark .column.is-offset-6-tablet{margin-left:50%}html.theme--documenter-dark .column.is-7,html.theme--documenter-dark .column.is-7-tablet{flex:none;width:58.33333337%}html.theme--documenter-dark .column.is-offset-7,html.theme--documenter-dark .column.is-offset-7-tablet{margin-left:58.33333337%}html.theme--documenter-dark .column.is-8,html.theme--documenter-dark .column.is-8-tablet{flex:none;width:66.66666674%}html.theme--documenter-dark .column.is-offset-8,html.theme--documenter-dark .column.is-offset-8-tablet{margin-left:66.66666674%}html.theme--documenter-dark .column.is-9,html.theme--documenter-dark .column.is-9-tablet{flex:none;width:75%}html.theme--documenter-dark 
.column.is-offset-9,html.theme--documenter-dark .column.is-offset-9-tablet{margin-left:75%}html.theme--documenter-dark .column.is-10,html.theme--documenter-dark .column.is-10-tablet{flex:none;width:83.33333337%}html.theme--documenter-dark .column.is-offset-10,html.theme--documenter-dark .column.is-offset-10-tablet{margin-left:83.33333337%}html.theme--documenter-dark .column.is-11,html.theme--documenter-dark .column.is-11-tablet{flex:none;width:91.66666674%}html.theme--documenter-dark .column.is-offset-11,html.theme--documenter-dark .column.is-offset-11-tablet{margin-left:91.66666674%}html.theme--documenter-dark .column.is-12,html.theme--documenter-dark .column.is-12-tablet{flex:none;width:100%}html.theme--documenter-dark .column.is-offset-12,html.theme--documenter-dark .column.is-offset-12-tablet{margin-left:100%}}@media screen and (max-width: 1055px){html.theme--documenter-dark .column.is-narrow-touch{flex:none;width:unset}html.theme--documenter-dark .column.is-full-touch{flex:none;width:100%}html.theme--documenter-dark .column.is-three-quarters-touch{flex:none;width:75%}html.theme--documenter-dark .column.is-two-thirds-touch{flex:none;width:66.6666%}html.theme--documenter-dark .column.is-half-touch{flex:none;width:50%}html.theme--documenter-dark .column.is-one-third-touch{flex:none;width:33.3333%}html.theme--documenter-dark .column.is-one-quarter-touch{flex:none;width:25%}html.theme--documenter-dark .column.is-one-fifth-touch{flex:none;width:20%}html.theme--documenter-dark .column.is-two-fifths-touch{flex:none;width:40%}html.theme--documenter-dark .column.is-three-fifths-touch{flex:none;width:60%}html.theme--documenter-dark .column.is-four-fifths-touch{flex:none;width:80%}html.theme--documenter-dark .column.is-offset-three-quarters-touch{margin-left:75%}html.theme--documenter-dark .column.is-offset-two-thirds-touch{margin-left:66.6666%}html.theme--documenter-dark .column.is-offset-half-touch{margin-left:50%}html.theme--documenter-dark .column.is-offset-one-third-touch{margin-left:33.3333%}html.theme--documenter-dark .column.is-offset-one-quarter-touch{margin-left:25%}html.theme--documenter-dark .column.is-offset-one-fifth-touch{margin-left:20%}html.theme--documenter-dark .column.is-offset-two-fifths-touch{margin-left:40%}html.theme--documenter-dark .column.is-offset-three-fifths-touch{margin-left:60%}html.theme--documenter-dark .column.is-offset-four-fifths-touch{margin-left:80%}html.theme--documenter-dark .column.is-0-touch{flex:none;width:0%}html.theme--documenter-dark .column.is-offset-0-touch{margin-left:0%}html.theme--documenter-dark .column.is-1-touch{flex:none;width:8.33333337%}html.theme--documenter-dark .column.is-offset-1-touch{margin-left:8.33333337%}html.theme--documenter-dark .column.is-2-touch{flex:none;width:16.66666674%}html.theme--documenter-dark .column.is-offset-2-touch{margin-left:16.66666674%}html.theme--documenter-dark .column.is-3-touch{flex:none;width:25%}html.theme--documenter-dark .column.is-offset-3-touch{margin-left:25%}html.theme--documenter-dark .column.is-4-touch{flex:none;width:33.33333337%}html.theme--documenter-dark .column.is-offset-4-touch{margin-left:33.33333337%}html.theme--documenter-dark .column.is-5-touch{flex:none;width:41.66666674%}html.theme--documenter-dark .column.is-offset-5-touch{margin-left:41.66666674%}html.theme--documenter-dark .column.is-6-touch{flex:none;width:50%}html.theme--documenter-dark .column.is-offset-6-touch{margin-left:50%}html.theme--documenter-dark .column.is-7-touch{flex:none;width:58.33333337%}html.theme--documenter-dark 
.column.is-offset-7-touch{margin-left:58.33333337%}html.theme--documenter-dark .column.is-8-touch{flex:none;width:66.66666674%}html.theme--documenter-dark .column.is-offset-8-touch{margin-left:66.66666674%}html.theme--documenter-dark .column.is-9-touch{flex:none;width:75%}html.theme--documenter-dark .column.is-offset-9-touch{margin-left:75%}html.theme--documenter-dark .column.is-10-touch{flex:none;width:83.33333337%}html.theme--documenter-dark .column.is-offset-10-touch{margin-left:83.33333337%}html.theme--documenter-dark .column.is-11-touch{flex:none;width:91.66666674%}html.theme--documenter-dark .column.is-offset-11-touch{margin-left:91.66666674%}html.theme--documenter-dark .column.is-12-touch{flex:none;width:100%}html.theme--documenter-dark .column.is-offset-12-touch{margin-left:100%}}@media screen and (min-width: 1056px){html.theme--documenter-dark .column.is-narrow-desktop{flex:none;width:unset}html.theme--documenter-dark .column.is-full-desktop{flex:none;width:100%}html.theme--documenter-dark .column.is-three-quarters-desktop{flex:none;width:75%}html.theme--documenter-dark .column.is-two-thirds-desktop{flex:none;width:66.6666%}html.theme--documenter-dark .column.is-half-desktop{flex:none;width:50%}html.theme--documenter-dark .column.is-one-third-desktop{flex:none;width:33.3333%}html.theme--documenter-dark .column.is-one-quarter-desktop{flex:none;width:25%}html.theme--documenter-dark .column.is-one-fifth-desktop{flex:none;width:20%}html.theme--documenter-dark .column.is-two-fifths-desktop{flex:none;width:40%}html.theme--documenter-dark .column.is-three-fifths-desktop{flex:none;width:60%}html.theme--documenter-dark .column.is-four-fifths-desktop{flex:none;width:80%}html.theme--documenter-dark .column.is-offset-three-quarters-desktop{margin-left:75%}html.theme--documenter-dark .column.is-offset-two-thirds-desktop{margin-left:66.6666%}html.theme--documenter-dark .column.is-offset-half-desktop{margin-left:50%}html.theme--documenter-dark .column.is-offset-one-third-desktop{margin-left:33.3333%}html.theme--documenter-dark .column.is-offset-one-quarter-desktop{margin-left:25%}html.theme--documenter-dark .column.is-offset-one-fifth-desktop{margin-left:20%}html.theme--documenter-dark .column.is-offset-two-fifths-desktop{margin-left:40%}html.theme--documenter-dark .column.is-offset-three-fifths-desktop{margin-left:60%}html.theme--documenter-dark .column.is-offset-four-fifths-desktop{margin-left:80%}html.theme--documenter-dark .column.is-0-desktop{flex:none;width:0%}html.theme--documenter-dark .column.is-offset-0-desktop{margin-left:0%}html.theme--documenter-dark .column.is-1-desktop{flex:none;width:8.33333337%}html.theme--documenter-dark .column.is-offset-1-desktop{margin-left:8.33333337%}html.theme--documenter-dark .column.is-2-desktop{flex:none;width:16.66666674%}html.theme--documenter-dark .column.is-offset-2-desktop{margin-left:16.66666674%}html.theme--documenter-dark .column.is-3-desktop{flex:none;width:25%}html.theme--documenter-dark .column.is-offset-3-desktop{margin-left:25%}html.theme--documenter-dark .column.is-4-desktop{flex:none;width:33.33333337%}html.theme--documenter-dark .column.is-offset-4-desktop{margin-left:33.33333337%}html.theme--documenter-dark .column.is-5-desktop{flex:none;width:41.66666674%}html.theme--documenter-dark .column.is-offset-5-desktop{margin-left:41.66666674%}html.theme--documenter-dark .column.is-6-desktop{flex:none;width:50%}html.theme--documenter-dark .column.is-offset-6-desktop{margin-left:50%}html.theme--documenter-dark 
.column.is-7-desktop{flex:none;width:58.33333337%}html.theme--documenter-dark .column.is-offset-7-desktop{margin-left:58.33333337%}html.theme--documenter-dark .column.is-8-desktop{flex:none;width:66.66666674%}html.theme--documenter-dark .column.is-offset-8-desktop{margin-left:66.66666674%}html.theme--documenter-dark .column.is-9-desktop{flex:none;width:75%}html.theme--documenter-dark .column.is-offset-9-desktop{margin-left:75%}html.theme--documenter-dark .column.is-10-desktop{flex:none;width:83.33333337%}html.theme--documenter-dark .column.is-offset-10-desktop{margin-left:83.33333337%}html.theme--documenter-dark .column.is-11-desktop{flex:none;width:91.66666674%}html.theme--documenter-dark .column.is-offset-11-desktop{margin-left:91.66666674%}html.theme--documenter-dark .column.is-12-desktop{flex:none;width:100%}html.theme--documenter-dark .column.is-offset-12-desktop{margin-left:100%}}@media screen and (min-width: 1216px){html.theme--documenter-dark .column.is-narrow-widescreen{flex:none;width:unset}html.theme--documenter-dark .column.is-full-widescreen{flex:none;width:100%}html.theme--documenter-dark .column.is-three-quarters-widescreen{flex:none;width:75%}html.theme--documenter-dark .column.is-two-thirds-widescreen{flex:none;width:66.6666%}html.theme--documenter-dark .column.is-half-widescreen{flex:none;width:50%}html.theme--documenter-dark .column.is-one-third-widescreen{flex:none;width:33.3333%}html.theme--documenter-dark .column.is-one-quarter-widescreen{flex:none;width:25%}html.theme--documenter-dark .column.is-one-fifth-widescreen{flex:none;width:20%}html.theme--documenter-dark .column.is-two-fifths-widescreen{flex:none;width:40%}html.theme--documenter-dark .column.is-three-fifths-widescreen{flex:none;width:60%}html.theme--documenter-dark .column.is-four-fifths-widescreen{flex:none;width:80%}html.theme--documenter-dark .column.is-offset-three-quarters-widescreen{margin-left:75%}html.theme--documenter-dark .column.is-offset-two-thirds-widescreen{margin-left:66.6666%}html.theme--documenter-dark .column.is-offset-half-widescreen{margin-left:50%}html.theme--documenter-dark .column.is-offset-one-third-widescreen{margin-left:33.3333%}html.theme--documenter-dark .column.is-offset-one-quarter-widescreen{margin-left:25%}html.theme--documenter-dark .column.is-offset-one-fifth-widescreen{margin-left:20%}html.theme--documenter-dark .column.is-offset-two-fifths-widescreen{margin-left:40%}html.theme--documenter-dark .column.is-offset-three-fifths-widescreen{margin-left:60%}html.theme--documenter-dark .column.is-offset-four-fifths-widescreen{margin-left:80%}html.theme--documenter-dark .column.is-0-widescreen{flex:none;width:0%}html.theme--documenter-dark .column.is-offset-0-widescreen{margin-left:0%}html.theme--documenter-dark .column.is-1-widescreen{flex:none;width:8.33333337%}html.theme--documenter-dark .column.is-offset-1-widescreen{margin-left:8.33333337%}html.theme--documenter-dark .column.is-2-widescreen{flex:none;width:16.66666674%}html.theme--documenter-dark .column.is-offset-2-widescreen{margin-left:16.66666674%}html.theme--documenter-dark .column.is-3-widescreen{flex:none;width:25%}html.theme--documenter-dark .column.is-offset-3-widescreen{margin-left:25%}html.theme--documenter-dark .column.is-4-widescreen{flex:none;width:33.33333337%}html.theme--documenter-dark .column.is-offset-4-widescreen{margin-left:33.33333337%}html.theme--documenter-dark .column.is-5-widescreen{flex:none;width:41.66666674%}html.theme--documenter-dark 
.column.is-offset-5-widescreen{margin-left:41.66666674%}html.theme--documenter-dark .column.is-6-widescreen{flex:none;width:50%}html.theme--documenter-dark .column.is-offset-6-widescreen{margin-left:50%}html.theme--documenter-dark .column.is-7-widescreen{flex:none;width:58.33333337%}html.theme--documenter-dark .column.is-offset-7-widescreen{margin-left:58.33333337%}html.theme--documenter-dark .column.is-8-widescreen{flex:none;width:66.66666674%}html.theme--documenter-dark .column.is-offset-8-widescreen{margin-left:66.66666674%}html.theme--documenter-dark .column.is-9-widescreen{flex:none;width:75%}html.theme--documenter-dark .column.is-offset-9-widescreen{margin-left:75%}html.theme--documenter-dark .column.is-10-widescreen{flex:none;width:83.33333337%}html.theme--documenter-dark .column.is-offset-10-widescreen{margin-left:83.33333337%}html.theme--documenter-dark .column.is-11-widescreen{flex:none;width:91.66666674%}html.theme--documenter-dark .column.is-offset-11-widescreen{margin-left:91.66666674%}html.theme--documenter-dark .column.is-12-widescreen{flex:none;width:100%}html.theme--documenter-dark .column.is-offset-12-widescreen{margin-left:100%}}@media screen and (min-width: 1408px){html.theme--documenter-dark .column.is-narrow-fullhd{flex:none;width:unset}html.theme--documenter-dark .column.is-full-fullhd{flex:none;width:100%}html.theme--documenter-dark .column.is-three-quarters-fullhd{flex:none;width:75%}html.theme--documenter-dark .column.is-two-thirds-fullhd{flex:none;width:66.6666%}html.theme--documenter-dark .column.is-half-fullhd{flex:none;width:50%}html.theme--documenter-dark .column.is-one-third-fullhd{flex:none;width:33.3333%}html.theme--documenter-dark .column.is-one-quarter-fullhd{flex:none;width:25%}html.theme--documenter-dark .column.is-one-fifth-fullhd{flex:none;width:20%}html.theme--documenter-dark .column.is-two-fifths-fullhd{flex:none;width:40%}html.theme--documenter-dark .column.is-three-fifths-fullhd{flex:none;width:60%}html.theme--documenter-dark .column.is-four-fifths-fullhd{flex:none;width:80%}html.theme--documenter-dark .column.is-offset-three-quarters-fullhd{margin-left:75%}html.theme--documenter-dark .column.is-offset-two-thirds-fullhd{margin-left:66.6666%}html.theme--documenter-dark .column.is-offset-half-fullhd{margin-left:50%}html.theme--documenter-dark .column.is-offset-one-third-fullhd{margin-left:33.3333%}html.theme--documenter-dark .column.is-offset-one-quarter-fullhd{margin-left:25%}html.theme--documenter-dark .column.is-offset-one-fifth-fullhd{margin-left:20%}html.theme--documenter-dark .column.is-offset-two-fifths-fullhd{margin-left:40%}html.theme--documenter-dark .column.is-offset-three-fifths-fullhd{margin-left:60%}html.theme--documenter-dark .column.is-offset-four-fifths-fullhd{margin-left:80%}html.theme--documenter-dark .column.is-0-fullhd{flex:none;width:0%}html.theme--documenter-dark .column.is-offset-0-fullhd{margin-left:0%}html.theme--documenter-dark .column.is-1-fullhd{flex:none;width:8.33333337%}html.theme--documenter-dark .column.is-offset-1-fullhd{margin-left:8.33333337%}html.theme--documenter-dark .column.is-2-fullhd{flex:none;width:16.66666674%}html.theme--documenter-dark .column.is-offset-2-fullhd{margin-left:16.66666674%}html.theme--documenter-dark .column.is-3-fullhd{flex:none;width:25%}html.theme--documenter-dark .column.is-offset-3-fullhd{margin-left:25%}html.theme--documenter-dark .column.is-4-fullhd{flex:none;width:33.33333337%}html.theme--documenter-dark 
.column.is-offset-4-fullhd{margin-left:33.33333337%}html.theme--documenter-dark .column.is-5-fullhd{flex:none;width:41.66666674%}html.theme--documenter-dark .column.is-offset-5-fullhd{margin-left:41.66666674%}html.theme--documenter-dark .column.is-6-fullhd{flex:none;width:50%}html.theme--documenter-dark .column.is-offset-6-fullhd{margin-left:50%}html.theme--documenter-dark .column.is-7-fullhd{flex:none;width:58.33333337%}html.theme--documenter-dark .column.is-offset-7-fullhd{margin-left:58.33333337%}html.theme--documenter-dark .column.is-8-fullhd{flex:none;width:66.66666674%}html.theme--documenter-dark .column.is-offset-8-fullhd{margin-left:66.66666674%}html.theme--documenter-dark .column.is-9-fullhd{flex:none;width:75%}html.theme--documenter-dark .column.is-offset-9-fullhd{margin-left:75%}html.theme--documenter-dark .column.is-10-fullhd{flex:none;width:83.33333337%}html.theme--documenter-dark .column.is-offset-10-fullhd{margin-left:83.33333337%}html.theme--documenter-dark .column.is-11-fullhd{flex:none;width:91.66666674%}html.theme--documenter-dark .column.is-offset-11-fullhd{margin-left:91.66666674%}html.theme--documenter-dark .column.is-12-fullhd{flex:none;width:100%}html.theme--documenter-dark .column.is-offset-12-fullhd{margin-left:100%}}html.theme--documenter-dark .columns{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}html.theme--documenter-dark .columns:last-child{margin-bottom:-.75rem}html.theme--documenter-dark .columns:not(:last-child){margin-bottom:calc(1.5rem - .75rem)}html.theme--documenter-dark .columns.is-centered{justify-content:center}html.theme--documenter-dark .columns.is-gapless{margin-left:0;margin-right:0;margin-top:0}html.theme--documenter-dark .columns.is-gapless>.column{margin:0;padding:0 !important}html.theme--documenter-dark .columns.is-gapless:not(:last-child){margin-bottom:1.5rem}html.theme--documenter-dark .columns.is-gapless:last-child{margin-bottom:0}html.theme--documenter-dark .columns.is-mobile{display:flex}html.theme--documenter-dark .columns.is-multiline{flex-wrap:wrap}html.theme--documenter-dark .columns.is-vcentered{align-items:center}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns:not(.is-desktop){display:flex}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-desktop{display:flex}}html.theme--documenter-dark .columns.is-variable{--columnGap: 0.75rem;margin-left:calc(-1 * var(--columnGap));margin-right:calc(-1 * var(--columnGap))}html.theme--documenter-dark .columns.is-variable>.column{padding-left:var(--columnGap);padding-right:var(--columnGap)}html.theme--documenter-dark .columns.is-variable.is-0{--columnGap: 0rem}@media screen and (max-width: 768px){html.theme--documenter-dark .columns.is-variable.is-0-mobile{--columnGap: 0rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns.is-variable.is-0-tablet{--columnGap: 0rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-0-tablet-only{--columnGap: 0rem}}@media screen and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-0-touch{--columnGap: 0rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-variable.is-0-desktop{--columnGap: 0rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){html.theme--documenter-dark .columns.is-variable.is-0-desktop-only{--columnGap: 0rem}}@media screen and (min-width: 1216px){html.theme--documenter-dark 
.columns.is-variable.is-0-widescreen{--columnGap: 0rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){html.theme--documenter-dark .columns.is-variable.is-0-widescreen-only{--columnGap: 0rem}}@media screen and (min-width: 1408px){html.theme--documenter-dark .columns.is-variable.is-0-fullhd{--columnGap: 0rem}}html.theme--documenter-dark .columns.is-variable.is-1{--columnGap: .25rem}@media screen and (max-width: 768px){html.theme--documenter-dark .columns.is-variable.is-1-mobile{--columnGap: .25rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns.is-variable.is-1-tablet{--columnGap: .25rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-1-tablet-only{--columnGap: .25rem}}@media screen and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-1-touch{--columnGap: .25rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-variable.is-1-desktop{--columnGap: .25rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){html.theme--documenter-dark .columns.is-variable.is-1-desktop-only{--columnGap: .25rem}}@media screen and (min-width: 1216px){html.theme--documenter-dark .columns.is-variable.is-1-widescreen{--columnGap: .25rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){html.theme--documenter-dark .columns.is-variable.is-1-widescreen-only{--columnGap: .25rem}}@media screen and (min-width: 1408px){html.theme--documenter-dark .columns.is-variable.is-1-fullhd{--columnGap: .25rem}}html.theme--documenter-dark .columns.is-variable.is-2{--columnGap: .5rem}@media screen and (max-width: 768px){html.theme--documenter-dark .columns.is-variable.is-2-mobile{--columnGap: .5rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns.is-variable.is-2-tablet{--columnGap: .5rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-2-tablet-only{--columnGap: .5rem}}@media screen and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-2-touch{--columnGap: .5rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-variable.is-2-desktop{--columnGap: .5rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){html.theme--documenter-dark .columns.is-variable.is-2-desktop-only{--columnGap: .5rem}}@media screen and (min-width: 1216px){html.theme--documenter-dark .columns.is-variable.is-2-widescreen{--columnGap: .5rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){html.theme--documenter-dark .columns.is-variable.is-2-widescreen-only{--columnGap: .5rem}}@media screen and (min-width: 1408px){html.theme--documenter-dark .columns.is-variable.is-2-fullhd{--columnGap: .5rem}}html.theme--documenter-dark .columns.is-variable.is-3{--columnGap: .75rem}@media screen and (max-width: 768px){html.theme--documenter-dark .columns.is-variable.is-3-mobile{--columnGap: .75rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns.is-variable.is-3-tablet{--columnGap: .75rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-3-tablet-only{--columnGap: .75rem}}@media screen and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-3-touch{--columnGap: .75rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-variable.is-3-desktop{--columnGap: .75rem}}@media screen and 
(min-width: 1056px) and (max-width: 1215px){html.theme--documenter-dark .columns.is-variable.is-3-desktop-only{--columnGap: .75rem}}@media screen and (min-width: 1216px){html.theme--documenter-dark .columns.is-variable.is-3-widescreen{--columnGap: .75rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){html.theme--documenter-dark .columns.is-variable.is-3-widescreen-only{--columnGap: .75rem}}@media screen and (min-width: 1408px){html.theme--documenter-dark .columns.is-variable.is-3-fullhd{--columnGap: .75rem}}html.theme--documenter-dark .columns.is-variable.is-4{--columnGap: 1rem}@media screen and (max-width: 768px){html.theme--documenter-dark .columns.is-variable.is-4-mobile{--columnGap: 1rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns.is-variable.is-4-tablet{--columnGap: 1rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-4-tablet-only{--columnGap: 1rem}}@media screen and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-4-touch{--columnGap: 1rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-variable.is-4-desktop{--columnGap: 1rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){html.theme--documenter-dark .columns.is-variable.is-4-desktop-only{--columnGap: 1rem}}@media screen and (min-width: 1216px){html.theme--documenter-dark .columns.is-variable.is-4-widescreen{--columnGap: 1rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){html.theme--documenter-dark .columns.is-variable.is-4-widescreen-only{--columnGap: 1rem}}@media screen and (min-width: 1408px){html.theme--documenter-dark .columns.is-variable.is-4-fullhd{--columnGap: 1rem}}html.theme--documenter-dark .columns.is-variable.is-5{--columnGap: 1.25rem}@media screen and (max-width: 768px){html.theme--documenter-dark .columns.is-variable.is-5-mobile{--columnGap: 1.25rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns.is-variable.is-5-tablet{--columnGap: 1.25rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-5-tablet-only{--columnGap: 1.25rem}}@media screen and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-5-touch{--columnGap: 1.25rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-variable.is-5-desktop{--columnGap: 1.25rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){html.theme--documenter-dark .columns.is-variable.is-5-desktop-only{--columnGap: 1.25rem}}@media screen and (min-width: 1216px){html.theme--documenter-dark .columns.is-variable.is-5-widescreen{--columnGap: 1.25rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){html.theme--documenter-dark .columns.is-variable.is-5-widescreen-only{--columnGap: 1.25rem}}@media screen and (min-width: 1408px){html.theme--documenter-dark .columns.is-variable.is-5-fullhd{--columnGap: 1.25rem}}html.theme--documenter-dark .columns.is-variable.is-6{--columnGap: 1.5rem}@media screen and (max-width: 768px){html.theme--documenter-dark .columns.is-variable.is-6-mobile{--columnGap: 1.5rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns.is-variable.is-6-tablet{--columnGap: 1.5rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-6-tablet-only{--columnGap: 1.5rem}}@media screen and (max-width: 
1055px){html.theme--documenter-dark .columns.is-variable.is-6-touch{--columnGap: 1.5rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-variable.is-6-desktop{--columnGap: 1.5rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){html.theme--documenter-dark .columns.is-variable.is-6-desktop-only{--columnGap: 1.5rem}}@media screen and (min-width: 1216px){html.theme--documenter-dark .columns.is-variable.is-6-widescreen{--columnGap: 1.5rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){html.theme--documenter-dark .columns.is-variable.is-6-widescreen-only{--columnGap: 1.5rem}}@media screen and (min-width: 1408px){html.theme--documenter-dark .columns.is-variable.is-6-fullhd{--columnGap: 1.5rem}}html.theme--documenter-dark .columns.is-variable.is-7{--columnGap: 1.75rem}@media screen and (max-width: 768px){html.theme--documenter-dark .columns.is-variable.is-7-mobile{--columnGap: 1.75rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns.is-variable.is-7-tablet{--columnGap: 1.75rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-7-tablet-only{--columnGap: 1.75rem}}@media screen and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-7-touch{--columnGap: 1.75rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-variable.is-7-desktop{--columnGap: 1.75rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){html.theme--documenter-dark .columns.is-variable.is-7-desktop-only{--columnGap: 1.75rem}}@media screen and (min-width: 1216px){html.theme--documenter-dark .columns.is-variable.is-7-widescreen{--columnGap: 1.75rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){html.theme--documenter-dark .columns.is-variable.is-7-widescreen-only{--columnGap: 1.75rem}}@media screen and (min-width: 1408px){html.theme--documenter-dark .columns.is-variable.is-7-fullhd{--columnGap: 1.75rem}}html.theme--documenter-dark .columns.is-variable.is-8{--columnGap: 2rem}@media screen and (max-width: 768px){html.theme--documenter-dark .columns.is-variable.is-8-mobile{--columnGap: 2rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .columns.is-variable.is-8-tablet{--columnGap: 2rem}}@media screen and (min-width: 769px) and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-8-tablet-only{--columnGap: 2rem}}@media screen and (max-width: 1055px){html.theme--documenter-dark .columns.is-variable.is-8-touch{--columnGap: 2rem}}@media screen and (min-width: 1056px){html.theme--documenter-dark .columns.is-variable.is-8-desktop{--columnGap: 2rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){html.theme--documenter-dark .columns.is-variable.is-8-desktop-only{--columnGap: 2rem}}@media screen and (min-width: 1216px){html.theme--documenter-dark .columns.is-variable.is-8-widescreen{--columnGap: 2rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){html.theme--documenter-dark .columns.is-variable.is-8-widescreen-only{--columnGap: 2rem}}@media screen and (min-width: 1408px){html.theme--documenter-dark .columns.is-variable.is-8-fullhd{--columnGap: 2rem}}html.theme--documenter-dark .tile{align-items:stretch;display:block;flex-basis:0;flex-grow:1;flex-shrink:1;min-height:min-content}html.theme--documenter-dark .tile.is-ancestor{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}html.theme--documenter-dark 
.tile.is-ancestor:last-child{margin-bottom:-.75rem}html.theme--documenter-dark .tile.is-ancestor:not(:last-child){margin-bottom:.75rem}html.theme--documenter-dark .tile.is-child{margin:0 !important}html.theme--documenter-dark .tile.is-parent{padding:.75rem}html.theme--documenter-dark .tile.is-vertical{flex-direction:column}html.theme--documenter-dark .tile.is-vertical>.tile.is-child:not(:last-child){margin-bottom:1.5rem !important}@media screen and (min-width: 769px),print{html.theme--documenter-dark .tile:not(.is-child){display:flex}html.theme--documenter-dark .tile.is-1{flex:none;width:8.33333337%}html.theme--documenter-dark .tile.is-2{flex:none;width:16.66666674%}html.theme--documenter-dark .tile.is-3{flex:none;width:25%}html.theme--documenter-dark .tile.is-4{flex:none;width:33.33333337%}html.theme--documenter-dark .tile.is-5{flex:none;width:41.66666674%}html.theme--documenter-dark .tile.is-6{flex:none;width:50%}html.theme--documenter-dark .tile.is-7{flex:none;width:58.33333337%}html.theme--documenter-dark .tile.is-8{flex:none;width:66.66666674%}html.theme--documenter-dark .tile.is-9{flex:none;width:75%}html.theme--documenter-dark .tile.is-10{flex:none;width:83.33333337%}html.theme--documenter-dark .tile.is-11{flex:none;width:91.66666674%}html.theme--documenter-dark .tile.is-12{flex:none;width:100%}}html.theme--documenter-dark .hero{align-items:stretch;display:flex;flex-direction:column;justify-content:space-between}html.theme--documenter-dark .hero .navbar{background:none}html.theme--documenter-dark .hero .tabs ul{border-bottom:none}html.theme--documenter-dark .hero.is-white{background-color:#fff;color:#0a0a0a}html.theme--documenter-dark .hero.is-white a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-white strong{color:inherit}html.theme--documenter-dark .hero.is-white .title{color:#0a0a0a}html.theme--documenter-dark .hero.is-white .subtitle{color:rgba(10,10,10,0.9)}html.theme--documenter-dark .hero.is-white .subtitle a:not(.button),html.theme--documenter-dark .hero.is-white .subtitle strong{color:#0a0a0a}@media screen and (max-width: 1055px){html.theme--documenter-dark .hero.is-white .navbar-menu{background-color:#fff}}html.theme--documenter-dark .hero.is-white .navbar-item,html.theme--documenter-dark .hero.is-white .navbar-link{color:rgba(10,10,10,0.7)}html.theme--documenter-dark .hero.is-white a.navbar-item:hover,html.theme--documenter-dark .hero.is-white a.navbar-item.is-active,html.theme--documenter-dark .hero.is-white .navbar-link:hover,html.theme--documenter-dark .hero.is-white .navbar-link.is-active{background-color:#f2f2f2;color:#0a0a0a}html.theme--documenter-dark .hero.is-white .tabs a{color:#0a0a0a;opacity:0.9}html.theme--documenter-dark .hero.is-white .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-white .tabs li.is-active a{color:#fff !important;opacity:1}html.theme--documenter-dark .hero.is-white .tabs.is-boxed a,html.theme--documenter-dark .hero.is-white .tabs.is-toggle a{color:#0a0a0a}html.theme--documenter-dark .hero.is-white .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-white .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-white .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-white .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-white .tabs.is-toggle li.is-active a,html.theme--documenter-dark .hero.is-white .tabs.is-toggle li.is-active 
a:hover{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}html.theme--documenter-dark .hero.is-white.is-bold{background-image:linear-gradient(141deg, #e8e3e4 0%, #fff 71%, #fff 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-white.is-bold .navbar-menu{background-image:linear-gradient(141deg, #e8e3e4 0%, #fff 71%, #fff 100%)}}html.theme--documenter-dark .hero.is-black{background-color:#0a0a0a;color:#fff}html.theme--documenter-dark .hero.is-black a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-black strong{color:inherit}html.theme--documenter-dark .hero.is-black .title{color:#fff}html.theme--documenter-dark .hero.is-black .subtitle{color:rgba(255,255,255,0.9)}html.theme--documenter-dark .hero.is-black .subtitle a:not(.button),html.theme--documenter-dark .hero.is-black .subtitle strong{color:#fff}@media screen and (max-width: 1055px){html.theme--documenter-dark .hero.is-black .navbar-menu{background-color:#0a0a0a}}html.theme--documenter-dark .hero.is-black .navbar-item,html.theme--documenter-dark .hero.is-black .navbar-link{color:rgba(255,255,255,0.7)}html.theme--documenter-dark .hero.is-black a.navbar-item:hover,html.theme--documenter-dark .hero.is-black a.navbar-item.is-active,html.theme--documenter-dark .hero.is-black .navbar-link:hover,html.theme--documenter-dark .hero.is-black .navbar-link.is-active{background-color:#000;color:#fff}html.theme--documenter-dark .hero.is-black .tabs a{color:#fff;opacity:0.9}html.theme--documenter-dark .hero.is-black .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-black .tabs li.is-active a{color:#0a0a0a !important;opacity:1}html.theme--documenter-dark .hero.is-black .tabs.is-boxed a,html.theme--documenter-dark .hero.is-black .tabs.is-toggle a{color:#fff}html.theme--documenter-dark .hero.is-black .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-black .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-black .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-black .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-black .tabs.is-toggle li.is-active a,html.theme--documenter-dark .hero.is-black .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#0a0a0a}html.theme--documenter-dark .hero.is-black.is-bold{background-image:linear-gradient(141deg, #000 0%, #0a0a0a 71%, #181616 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-black.is-bold .navbar-menu{background-image:linear-gradient(141deg, #000 0%, #0a0a0a 71%, #181616 100%)}}html.theme--documenter-dark .hero.is-light{background-color:#ecf0f1;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .hero.is-light a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-light strong{color:inherit}html.theme--documenter-dark .hero.is-light .title{color:rgba(0,0,0,0.7)}html.theme--documenter-dark .hero.is-light .subtitle{color:rgba(0,0,0,0.9)}html.theme--documenter-dark .hero.is-light .subtitle a:not(.button),html.theme--documenter-dark .hero.is-light .subtitle strong{color:rgba(0,0,0,0.7)}@media screen and (max-width: 1055px){html.theme--documenter-dark .hero.is-light .navbar-menu{background-color:#ecf0f1}}html.theme--documenter-dark .hero.is-light .navbar-item,html.theme--documenter-dark .hero.is-light .navbar-link{color:rgba(0,0,0,0.7)}html.theme--documenter-dark .hero.is-light 
a.navbar-item:hover,html.theme--documenter-dark .hero.is-light a.navbar-item.is-active,html.theme--documenter-dark .hero.is-light .navbar-link:hover,html.theme--documenter-dark .hero.is-light .navbar-link.is-active{background-color:#dde4e6;color:rgba(0,0,0,0.7)}html.theme--documenter-dark .hero.is-light .tabs a{color:rgba(0,0,0,0.7);opacity:0.9}html.theme--documenter-dark .hero.is-light .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-light .tabs li.is-active a{color:#ecf0f1 !important;opacity:1}html.theme--documenter-dark .hero.is-light .tabs.is-boxed a,html.theme--documenter-dark .hero.is-light .tabs.is-toggle a{color:rgba(0,0,0,0.7)}html.theme--documenter-dark .hero.is-light .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-light .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-light .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-light .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-light .tabs.is-toggle li.is-active a,html.theme--documenter-dark .hero.is-light .tabs.is-toggle li.is-active a:hover{background-color:rgba(0,0,0,0.7);border-color:rgba(0,0,0,0.7);color:#ecf0f1}html.theme--documenter-dark .hero.is-light.is-bold{background-image:linear-gradient(141deg, #cadfe0 0%, #ecf0f1 71%, #fafbfc 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-light.is-bold .navbar-menu{background-image:linear-gradient(141deg, #cadfe0 0%, #ecf0f1 71%, #fafbfc 100%)}}html.theme--documenter-dark .hero.is-dark,html.theme--documenter-dark .content kbd.hero{background-color:#282f2f;color:#fff}html.theme--documenter-dark .hero.is-dark a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .content kbd.hero a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-dark strong,html.theme--documenter-dark .content kbd.hero strong{color:inherit}html.theme--documenter-dark .hero.is-dark .title,html.theme--documenter-dark .content kbd.hero .title{color:#fff}html.theme--documenter-dark .hero.is-dark .subtitle,html.theme--documenter-dark .content kbd.hero .subtitle{color:rgba(255,255,255,0.9)}html.theme--documenter-dark .hero.is-dark .subtitle a:not(.button),html.theme--documenter-dark .content kbd.hero .subtitle a:not(.button),html.theme--documenter-dark .hero.is-dark .subtitle strong,html.theme--documenter-dark .content kbd.hero .subtitle strong{color:#fff}@media screen and (max-width: 1055px){html.theme--documenter-dark .hero.is-dark .navbar-menu,html.theme--documenter-dark .content kbd.hero .navbar-menu{background-color:#282f2f}}html.theme--documenter-dark .hero.is-dark .navbar-item,html.theme--documenter-dark .content kbd.hero .navbar-item,html.theme--documenter-dark .hero.is-dark .navbar-link,html.theme--documenter-dark .content kbd.hero .navbar-link{color:rgba(255,255,255,0.7)}html.theme--documenter-dark .hero.is-dark a.navbar-item:hover,html.theme--documenter-dark .content kbd.hero a.navbar-item:hover,html.theme--documenter-dark .hero.is-dark a.navbar-item.is-active,html.theme--documenter-dark .content kbd.hero a.navbar-item.is-active,html.theme--documenter-dark .hero.is-dark .navbar-link:hover,html.theme--documenter-dark .content kbd.hero .navbar-link:hover,html.theme--documenter-dark .hero.is-dark .navbar-link.is-active,html.theme--documenter-dark .content kbd.hero .navbar-link.is-active{background-color:#1d2122;color:#fff}html.theme--documenter-dark 
.hero.is-dark .tabs a,html.theme--documenter-dark .content kbd.hero .tabs a{color:#fff;opacity:0.9}html.theme--documenter-dark .hero.is-dark .tabs a:hover,html.theme--documenter-dark .content kbd.hero .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-dark .tabs li.is-active a,html.theme--documenter-dark .content kbd.hero .tabs li.is-active a{color:#282f2f !important;opacity:1}html.theme--documenter-dark .hero.is-dark .tabs.is-boxed a,html.theme--documenter-dark .content kbd.hero .tabs.is-boxed a,html.theme--documenter-dark .hero.is-dark .tabs.is-toggle a,html.theme--documenter-dark .content kbd.hero .tabs.is-toggle a{color:#fff}html.theme--documenter-dark .hero.is-dark .tabs.is-boxed a:hover,html.theme--documenter-dark .content kbd.hero .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-dark .tabs.is-toggle a:hover,html.theme--documenter-dark .content kbd.hero .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-dark .tabs.is-boxed li.is-active a,html.theme--documenter-dark .content kbd.hero .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-dark .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-dark .tabs.is-toggle li.is-active a,html.theme--documenter-dark .content kbd.hero .tabs.is-toggle li.is-active a,html.theme--documenter-dark .hero.is-dark .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#282f2f}html.theme--documenter-dark .hero.is-dark.is-bold,html.theme--documenter-dark .content kbd.hero.is-bold{background-image:linear-gradient(141deg, #0f1615 0%, #282f2f 71%, #313c40 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-dark.is-bold .navbar-menu,html.theme--documenter-dark .content kbd.hero.is-bold .navbar-menu{background-image:linear-gradient(141deg, #0f1615 0%, #282f2f 71%, #313c40 100%)}}html.theme--documenter-dark .hero.is-primary,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink{background-color:#375a7f;color:#fff}html.theme--documenter-dark .hero.is-primary a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-primary strong,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink strong{color:inherit}html.theme--documenter-dark .hero.is-primary .title,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .title{color:#fff}html.theme--documenter-dark .hero.is-primary .subtitle,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .subtitle{color:rgba(255,255,255,0.9)}html.theme--documenter-dark .hero.is-primary .subtitle a:not(.button),html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .subtitle a:not(.button),html.theme--documenter-dark .hero.is-primary .subtitle strong,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .subtitle strong{color:#fff}@media screen and (max-width: 1055px){html.theme--documenter-dark .hero.is-primary .navbar-menu,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .navbar-menu{background-color:#375a7f}}html.theme--documenter-dark .hero.is-primary .navbar-item,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .navbar-item,html.theme--documenter-dark .hero.is-primary .navbar-link,html.theme--documenter-dark 
.docstring>section>a.hero.docs-sourcelink .navbar-link{color:rgba(255,255,255,0.7)}html.theme--documenter-dark .hero.is-primary a.navbar-item:hover,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink a.navbar-item:hover,html.theme--documenter-dark .hero.is-primary a.navbar-item.is-active,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink a.navbar-item.is-active,html.theme--documenter-dark .hero.is-primary .navbar-link:hover,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .navbar-link:hover,html.theme--documenter-dark .hero.is-primary .navbar-link.is-active,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .navbar-link.is-active{background-color:#2f4d6d;color:#fff}html.theme--documenter-dark .hero.is-primary .tabs a,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .tabs a{color:#fff;opacity:0.9}html.theme--documenter-dark .hero.is-primary .tabs a:hover,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-primary .tabs li.is-active a,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .tabs li.is-active a{color:#375a7f !important;opacity:1}html.theme--documenter-dark .hero.is-primary .tabs.is-boxed a,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .tabs.is-boxed a,html.theme--documenter-dark .hero.is-primary .tabs.is-toggle a,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .tabs.is-toggle a{color:#fff}html.theme--documenter-dark .hero.is-primary .tabs.is-boxed a:hover,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-primary .tabs.is-toggle a:hover,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-primary .tabs.is-boxed li.is-active a,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-primary .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-primary .tabs.is-toggle li.is-active a,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .tabs.is-toggle li.is-active a,html.theme--documenter-dark .hero.is-primary .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#375a7f}html.theme--documenter-dark .hero.is-primary.is-bold,html.theme--documenter-dark .docstring>section>a.hero.is-bold.docs-sourcelink{background-image:linear-gradient(141deg, #214b62 0%, #375a7f 71%, #3a5796 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-primary.is-bold .navbar-menu,html.theme--documenter-dark .docstring>section>a.hero.is-bold.docs-sourcelink .navbar-menu{background-image:linear-gradient(141deg, #214b62 0%, #375a7f 71%, #3a5796 100%)}}html.theme--documenter-dark .hero.is-link{background-color:#1abc9c;color:#fff}html.theme--documenter-dark .hero.is-link a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-link strong{color:inherit}html.theme--documenter-dark .hero.is-link .title{color:#fff}html.theme--documenter-dark .hero.is-link .subtitle{color:rgba(255,255,255,0.9)}html.theme--documenter-dark .hero.is-link .subtitle a:not(.button),html.theme--documenter-dark .hero.is-link .subtitle strong{color:#fff}@media screen and (max-width: 
1055px){html.theme--documenter-dark .hero.is-link .navbar-menu{background-color:#1abc9c}}html.theme--documenter-dark .hero.is-link .navbar-item,html.theme--documenter-dark .hero.is-link .navbar-link{color:rgba(255,255,255,0.7)}html.theme--documenter-dark .hero.is-link a.navbar-item:hover,html.theme--documenter-dark .hero.is-link a.navbar-item.is-active,html.theme--documenter-dark .hero.is-link .navbar-link:hover,html.theme--documenter-dark .hero.is-link .navbar-link.is-active{background-color:#17a689;color:#fff}html.theme--documenter-dark .hero.is-link .tabs a{color:#fff;opacity:0.9}html.theme--documenter-dark .hero.is-link .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-link .tabs li.is-active a{color:#1abc9c !important;opacity:1}html.theme--documenter-dark .hero.is-link .tabs.is-boxed a,html.theme--documenter-dark .hero.is-link .tabs.is-toggle a{color:#fff}html.theme--documenter-dark .hero.is-link .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-link .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-link .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-link .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-link .tabs.is-toggle li.is-active a,html.theme--documenter-dark .hero.is-link .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#1abc9c}html.theme--documenter-dark .hero.is-link.is-bold{background-image:linear-gradient(141deg, #0c9764 0%, #1abc9c 71%, #17d8d2 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-link.is-bold .navbar-menu{background-image:linear-gradient(141deg, #0c9764 0%, #1abc9c 71%, #17d8d2 100%)}}html.theme--documenter-dark .hero.is-info{background-color:#024c7d;color:#fff}html.theme--documenter-dark .hero.is-info a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-info strong{color:inherit}html.theme--documenter-dark .hero.is-info .title{color:#fff}html.theme--documenter-dark .hero.is-info .subtitle{color:rgba(255,255,255,0.9)}html.theme--documenter-dark .hero.is-info .subtitle a:not(.button),html.theme--documenter-dark .hero.is-info .subtitle strong{color:#fff}@media screen and (max-width: 1055px){html.theme--documenter-dark .hero.is-info .navbar-menu{background-color:#024c7d}}html.theme--documenter-dark .hero.is-info .navbar-item,html.theme--documenter-dark .hero.is-info .navbar-link{color:rgba(255,255,255,0.7)}html.theme--documenter-dark .hero.is-info a.navbar-item:hover,html.theme--documenter-dark .hero.is-info a.navbar-item.is-active,html.theme--documenter-dark .hero.is-info .navbar-link:hover,html.theme--documenter-dark .hero.is-info .navbar-link.is-active{background-color:#023d64;color:#fff}html.theme--documenter-dark .hero.is-info .tabs a{color:#fff;opacity:0.9}html.theme--documenter-dark .hero.is-info .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-info .tabs li.is-active a{color:#024c7d !important;opacity:1}html.theme--documenter-dark .hero.is-info .tabs.is-boxed a,html.theme--documenter-dark .hero.is-info .tabs.is-toggle a{color:#fff}html.theme--documenter-dark .hero.is-info .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-info .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-info .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-info .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-info .tabs.is-toggle 
li.is-active a,html.theme--documenter-dark .hero.is-info .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#024c7d}html.theme--documenter-dark .hero.is-info.is-bold{background-image:linear-gradient(141deg, #003a4c 0%, #024c7d 71%, #004299 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-info.is-bold .navbar-menu{background-image:linear-gradient(141deg, #003a4c 0%, #024c7d 71%, #004299 100%)}}html.theme--documenter-dark .hero.is-success{background-color:#008438;color:#fff}html.theme--documenter-dark .hero.is-success a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-success strong{color:inherit}html.theme--documenter-dark .hero.is-success .title{color:#fff}html.theme--documenter-dark .hero.is-success .subtitle{color:rgba(255,255,255,0.9)}html.theme--documenter-dark .hero.is-success .subtitle a:not(.button),html.theme--documenter-dark .hero.is-success .subtitle strong{color:#fff}@media screen and (max-width: 1055px){html.theme--documenter-dark .hero.is-success .navbar-menu{background-color:#008438}}html.theme--documenter-dark .hero.is-success .navbar-item,html.theme--documenter-dark .hero.is-success .navbar-link{color:rgba(255,255,255,0.7)}html.theme--documenter-dark .hero.is-success a.navbar-item:hover,html.theme--documenter-dark .hero.is-success a.navbar-item.is-active,html.theme--documenter-dark .hero.is-success .navbar-link:hover,html.theme--documenter-dark .hero.is-success .navbar-link.is-active{background-color:#006b2d;color:#fff}html.theme--documenter-dark .hero.is-success .tabs a{color:#fff;opacity:0.9}html.theme--documenter-dark .hero.is-success .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-success .tabs li.is-active a{color:#008438 !important;opacity:1}html.theme--documenter-dark .hero.is-success .tabs.is-boxed a,html.theme--documenter-dark .hero.is-success .tabs.is-toggle a{color:#fff}html.theme--documenter-dark .hero.is-success .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-success .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-success .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-success .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-success .tabs.is-toggle li.is-active a,html.theme--documenter-dark .hero.is-success .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#008438}html.theme--documenter-dark .hero.is-success.is-bold{background-image:linear-gradient(141deg, #005115 0%, #008438 71%, #009e5d 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-success.is-bold .navbar-menu{background-image:linear-gradient(141deg, #005115 0%, #008438 71%, #009e5d 100%)}}html.theme--documenter-dark .hero.is-warning{background-color:#ad8100;color:#fff}html.theme--documenter-dark .hero.is-warning a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-warning strong{color:inherit}html.theme--documenter-dark .hero.is-warning .title{color:#fff}html.theme--documenter-dark .hero.is-warning .subtitle{color:rgba(255,255,255,0.9)}html.theme--documenter-dark .hero.is-warning .subtitle a:not(.button),html.theme--documenter-dark .hero.is-warning .subtitle strong{color:#fff}@media screen and (max-width: 1055px){html.theme--documenter-dark .hero.is-warning .navbar-menu{background-color:#ad8100}}html.theme--documenter-dark .hero.is-warning 
.navbar-item,html.theme--documenter-dark .hero.is-warning .navbar-link{color:rgba(255,255,255,0.7)}html.theme--documenter-dark .hero.is-warning a.navbar-item:hover,html.theme--documenter-dark .hero.is-warning a.navbar-item.is-active,html.theme--documenter-dark .hero.is-warning .navbar-link:hover,html.theme--documenter-dark .hero.is-warning .navbar-link.is-active{background-color:#946e00;color:#fff}html.theme--documenter-dark .hero.is-warning .tabs a{color:#fff;opacity:0.9}html.theme--documenter-dark .hero.is-warning .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-warning .tabs li.is-active a{color:#ad8100 !important;opacity:1}html.theme--documenter-dark .hero.is-warning .tabs.is-boxed a,html.theme--documenter-dark .hero.is-warning .tabs.is-toggle a{color:#fff}html.theme--documenter-dark .hero.is-warning .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-warning .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-warning .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-warning .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-warning .tabs.is-toggle li.is-active a,html.theme--documenter-dark .hero.is-warning .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#ad8100}html.theme--documenter-dark .hero.is-warning.is-bold{background-image:linear-gradient(141deg, #7a4700 0%, #ad8100 71%, #c7b500 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-warning.is-bold .navbar-menu{background-image:linear-gradient(141deg, #7a4700 0%, #ad8100 71%, #c7b500 100%)}}html.theme--documenter-dark .hero.is-danger{background-color:#9e1b0d;color:#fff}html.theme--documenter-dark .hero.is-danger a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),html.theme--documenter-dark .hero.is-danger strong{color:inherit}html.theme--documenter-dark .hero.is-danger .title{color:#fff}html.theme--documenter-dark .hero.is-danger .subtitle{color:rgba(255,255,255,0.9)}html.theme--documenter-dark .hero.is-danger .subtitle a:not(.button),html.theme--documenter-dark .hero.is-danger .subtitle strong{color:#fff}@media screen and (max-width: 1055px){html.theme--documenter-dark .hero.is-danger .navbar-menu{background-color:#9e1b0d}}html.theme--documenter-dark .hero.is-danger .navbar-item,html.theme--documenter-dark .hero.is-danger .navbar-link{color:rgba(255,255,255,0.7)}html.theme--documenter-dark .hero.is-danger a.navbar-item:hover,html.theme--documenter-dark .hero.is-danger a.navbar-item.is-active,html.theme--documenter-dark .hero.is-danger .navbar-link:hover,html.theme--documenter-dark .hero.is-danger .navbar-link.is-active{background-color:#86170b;color:#fff}html.theme--documenter-dark .hero.is-danger .tabs a{color:#fff;opacity:0.9}html.theme--documenter-dark .hero.is-danger .tabs a:hover{opacity:1}html.theme--documenter-dark .hero.is-danger .tabs li.is-active a{color:#9e1b0d !important;opacity:1}html.theme--documenter-dark .hero.is-danger .tabs.is-boxed a,html.theme--documenter-dark .hero.is-danger .tabs.is-toggle a{color:#fff}html.theme--documenter-dark .hero.is-danger .tabs.is-boxed a:hover,html.theme--documenter-dark .hero.is-danger .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}html.theme--documenter-dark .hero.is-danger .tabs.is-boxed li.is-active a,html.theme--documenter-dark .hero.is-danger .tabs.is-boxed li.is-active a:hover,html.theme--documenter-dark .hero.is-danger .tabs.is-toggle li.is-active 
a,html.theme--documenter-dark .hero.is-danger .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#9e1b0d}html.theme--documenter-dark .hero.is-danger.is-bold{background-image:linear-gradient(141deg, #75030b 0%, #9e1b0d 71%, #ba380a 100%)}@media screen and (max-width: 768px){html.theme--documenter-dark .hero.is-danger.is-bold .navbar-menu{background-image:linear-gradient(141deg, #75030b 0%, #9e1b0d 71%, #ba380a 100%)}}html.theme--documenter-dark .hero.is-small .hero-body,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.hero .hero-body{padding:1.5rem}@media screen and (min-width: 769px),print{html.theme--documenter-dark .hero.is-medium .hero-body{padding:9rem 4.5rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .hero.is-large .hero-body{padding:18rem 6rem}}html.theme--documenter-dark .hero.is-halfheight .hero-body,html.theme--documenter-dark .hero.is-fullheight .hero-body,html.theme--documenter-dark .hero.is-fullheight-with-navbar .hero-body{align-items:center;display:flex}html.theme--documenter-dark .hero.is-halfheight .hero-body>.container,html.theme--documenter-dark .hero.is-fullheight .hero-body>.container,html.theme--documenter-dark .hero.is-fullheight-with-navbar .hero-body>.container{flex-grow:1;flex-shrink:1}html.theme--documenter-dark .hero.is-halfheight{min-height:50vh}html.theme--documenter-dark .hero.is-fullheight{min-height:100vh}html.theme--documenter-dark .hero-video{overflow:hidden}html.theme--documenter-dark .hero-video video{left:50%;min-height:100%;min-width:100%;position:absolute;top:50%;transform:translate3d(-50%, -50%, 0)}html.theme--documenter-dark .hero-video.is-transparent{opacity:0.3}@media screen and (max-width: 768px){html.theme--documenter-dark .hero-video{display:none}}html.theme--documenter-dark .hero-buttons{margin-top:1.5rem}@media screen and (max-width: 768px){html.theme--documenter-dark .hero-buttons .button{display:flex}html.theme--documenter-dark .hero-buttons .button:not(:last-child){margin-bottom:0.75rem}}@media screen and (min-width: 769px),print{html.theme--documenter-dark .hero-buttons{display:flex;justify-content:center}html.theme--documenter-dark .hero-buttons .button:not(:last-child){margin-right:1.5rem}}html.theme--documenter-dark .hero-head,html.theme--documenter-dark .hero-foot{flex-grow:0;flex-shrink:0}html.theme--documenter-dark .hero-body{flex-grow:1;flex-shrink:0;padding:3rem 1.5rem}@media screen and (min-width: 769px),print{html.theme--documenter-dark .hero-body{padding:3rem 3rem}}html.theme--documenter-dark .section{padding:3rem 1.5rem}@media screen and (min-width: 1056px){html.theme--documenter-dark .section{padding:3rem 3rem}html.theme--documenter-dark .section.is-medium{padding:9rem 4.5rem}html.theme--documenter-dark .section.is-large{padding:18rem 6rem}}html.theme--documenter-dark .footer{background-color:#282f2f;padding:3rem 1.5rem 6rem}html.theme--documenter-dark hr{height:1px}html.theme--documenter-dark h6{text-transform:uppercase;letter-spacing:0.5px}html.theme--documenter-dark .hero{background-color:#343c3d}html.theme--documenter-dark a{transition:all 200ms ease}html.theme--documenter-dark .button{transition:all 200ms ease;border-width:1px;color:#fff}html.theme--documenter-dark .button.is-active,html.theme--documenter-dark .button.is-focused,html.theme--documenter-dark .button:active,html.theme--documenter-dark .button:focus{box-shadow:0 0 0 2px rgba(140,155,157,0.5)}html.theme--documenter-dark 
.button.is-white.is-hovered,html.theme--documenter-dark .button.is-white:hover{background-color:#fff}html.theme--documenter-dark .button.is-white.is-active,html.theme--documenter-dark .button.is-white.is-focused,html.theme--documenter-dark .button.is-white:active,html.theme--documenter-dark .button.is-white:focus{border-color:#fff;box-shadow:0 0 0 2px rgba(255,255,255,0.5)}html.theme--documenter-dark .button.is-black.is-hovered,html.theme--documenter-dark .button.is-black:hover{background-color:#1d1d1d}html.theme--documenter-dark .button.is-black.is-active,html.theme--documenter-dark .button.is-black.is-focused,html.theme--documenter-dark .button.is-black:active,html.theme--documenter-dark .button.is-black:focus{border-color:#0a0a0a;box-shadow:0 0 0 2px rgba(10,10,10,0.5)}html.theme--documenter-dark .button.is-light.is-hovered,html.theme--documenter-dark .button.is-light:hover{background-color:#fff}html.theme--documenter-dark .button.is-light.is-active,html.theme--documenter-dark .button.is-light.is-focused,html.theme--documenter-dark .button.is-light:active,html.theme--documenter-dark .button.is-light:focus{border-color:#ecf0f1;box-shadow:0 0 0 2px rgba(236,240,241,0.5)}html.theme--documenter-dark .button.is-dark.is-hovered,html.theme--documenter-dark .content kbd.button.is-hovered,html.theme--documenter-dark .button.is-dark:hover,html.theme--documenter-dark .content kbd.button:hover{background-color:#3a4344}html.theme--documenter-dark .button.is-dark.is-active,html.theme--documenter-dark .content kbd.button.is-active,html.theme--documenter-dark .button.is-dark.is-focused,html.theme--documenter-dark .content kbd.button.is-focused,html.theme--documenter-dark .button.is-dark:active,html.theme--documenter-dark .content kbd.button:active,html.theme--documenter-dark .button.is-dark:focus,html.theme--documenter-dark .content kbd.button:focus{border-color:#282f2f;box-shadow:0 0 0 2px rgba(40,47,47,0.5)}html.theme--documenter-dark .button.is-primary.is-hovered,html.theme--documenter-dark .docstring>section>a.button.is-hovered.docs-sourcelink,html.theme--documenter-dark .button.is-primary:hover,html.theme--documenter-dark .docstring>section>a.button.docs-sourcelink:hover{background-color:#436d9a}html.theme--documenter-dark .button.is-primary.is-active,html.theme--documenter-dark .docstring>section>a.button.is-active.docs-sourcelink,html.theme--documenter-dark .button.is-primary.is-focused,html.theme--documenter-dark .docstring>section>a.button.is-focused.docs-sourcelink,html.theme--documenter-dark .button.is-primary:active,html.theme--documenter-dark .docstring>section>a.button.docs-sourcelink:active,html.theme--documenter-dark .button.is-primary:focus,html.theme--documenter-dark .docstring>section>a.button.docs-sourcelink:focus{border-color:#375a7f;box-shadow:0 0 0 2px rgba(55,90,127,0.5)}html.theme--documenter-dark .button.is-link.is-hovered,html.theme--documenter-dark .button.is-link:hover{background-color:#1fdeb8}html.theme--documenter-dark .button.is-link.is-active,html.theme--documenter-dark .button.is-link.is-focused,html.theme--documenter-dark .button.is-link:active,html.theme--documenter-dark .button.is-link:focus{border-color:#1abc9c;box-shadow:0 0 0 2px rgba(26,188,156,0.5)}html.theme--documenter-dark .button.is-info.is-hovered,html.theme--documenter-dark .button.is-info:hover{background-color:#0363a3}html.theme--documenter-dark .button.is-info.is-active,html.theme--documenter-dark .button.is-info.is-focused,html.theme--documenter-dark .button.is-info:active,html.theme--documenter-dark 
.button.is-info:focus{border-color:#024c7d;box-shadow:0 0 0 2px rgba(2,76,125,0.5)}html.theme--documenter-dark .button.is-success.is-hovered,html.theme--documenter-dark .button.is-success:hover{background-color:#00aa48}html.theme--documenter-dark .button.is-success.is-active,html.theme--documenter-dark .button.is-success.is-focused,html.theme--documenter-dark .button.is-success:active,html.theme--documenter-dark .button.is-success:focus{border-color:#008438;box-shadow:0 0 0 2px rgba(0,132,56,0.5)}html.theme--documenter-dark .button.is-warning.is-hovered,html.theme--documenter-dark .button.is-warning:hover{background-color:#d39e00}html.theme--documenter-dark .button.is-warning.is-active,html.theme--documenter-dark .button.is-warning.is-focused,html.theme--documenter-dark .button.is-warning:active,html.theme--documenter-dark .button.is-warning:focus{border-color:#ad8100;box-shadow:0 0 0 2px rgba(173,129,0,0.5)}html.theme--documenter-dark .button.is-danger.is-hovered,html.theme--documenter-dark .button.is-danger:hover{background-color:#c12110}html.theme--documenter-dark .button.is-danger.is-active,html.theme--documenter-dark .button.is-danger.is-focused,html.theme--documenter-dark .button.is-danger:active,html.theme--documenter-dark .button.is-danger:focus{border-color:#9e1b0d;box-shadow:0 0 0 2px rgba(158,27,13,0.5)}html.theme--documenter-dark .label{color:#dbdee0}html.theme--documenter-dark .button,html.theme--documenter-dark .control.has-icons-left .icon,html.theme--documenter-dark .control.has-icons-right .icon,html.theme--documenter-dark .input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark .pagination-ellipsis,html.theme--documenter-dark .pagination-link,html.theme--documenter-dark .pagination-next,html.theme--documenter-dark .pagination-previous,html.theme--documenter-dark .select,html.theme--documenter-dark .select select,html.theme--documenter-dark .textarea{height:2.5em}html.theme--documenter-dark .input,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark .textarea{transition:all 200ms ease;box-shadow:none;border-width:1px;padding-left:1em;padding-right:1em}html.theme--documenter-dark .select:after,html.theme--documenter-dark .select select{border-width:1px}html.theme--documenter-dark .control.has-addons .button,html.theme--documenter-dark .control.has-addons .input,html.theme--documenter-dark .control.has-addons #documenter .docs-sidebar form.docs-search>input,html.theme--documenter-dark #documenter .docs-sidebar .control.has-addons form.docs-search>input,html.theme--documenter-dark .control.has-addons .select{margin-right:-1px}html.theme--documenter-dark .notification{background-color:#343c3d}html.theme--documenter-dark .card{box-shadow:none;border:1px solid #343c3d;background-color:#282f2f;border-radius:.4em}html.theme--documenter-dark .card .card-image img{border-radius:.4em .4em 0 0}html.theme--documenter-dark .card .card-header{box-shadow:none;background-color:rgba(18,18,18,0.2);border-radius:.4em .4em 0 0}html.theme--documenter-dark .card .card-footer{background-color:rgba(18,18,18,0.2)}html.theme--documenter-dark .card .card-footer,html.theme--documenter-dark .card .card-footer-item{border-width:1px;border-color:#343c3d}html.theme--documenter-dark .notification.is-white a:not(.button){color:#0a0a0a;text-decoration:underline}html.theme--documenter-dark .notification.is-black a:not(.button){color:#fff;text-decoration:underline}html.theme--documenter-dark 
.notification.is-light a:not(.button){color:rgba(0,0,0,0.7);text-decoration:underline}html.theme--documenter-dark .notification.is-dark a:not(.button),html.theme--documenter-dark .content kbd.notification a:not(.button){color:#fff;text-decoration:underline}html.theme--documenter-dark .notification.is-primary a:not(.button),html.theme--documenter-dark .docstring>section>a.notification.docs-sourcelink a:not(.button){color:#fff;text-decoration:underline}html.theme--documenter-dark .notification.is-link a:not(.button){color:#fff;text-decoration:underline}html.theme--documenter-dark .notification.is-info a:not(.button){color:#fff;text-decoration:underline}html.theme--documenter-dark .notification.is-success a:not(.button){color:#fff;text-decoration:underline}html.theme--documenter-dark .notification.is-warning a:not(.button){color:#fff;text-decoration:underline}html.theme--documenter-dark .notification.is-danger a:not(.button){color:#fff;text-decoration:underline}html.theme--documenter-dark .tag,html.theme--documenter-dark .content kbd,html.theme--documenter-dark .docstring>section>a.docs-sourcelink{border-radius:.4em}html.theme--documenter-dark .menu-list a{transition:all 300ms ease}html.theme--documenter-dark .modal-card-body{background-color:#282f2f}html.theme--documenter-dark .modal-card-foot,html.theme--documenter-dark .modal-card-head{border-color:#343c3d}html.theme--documenter-dark .message-header{font-weight:700;background-color:#343c3d;color:#fff}html.theme--documenter-dark .message-body{border-width:1px;border-color:#343c3d}html.theme--documenter-dark .navbar{border-radius:.4em}html.theme--documenter-dark .navbar.is-transparent{background:none}html.theme--documenter-dark .navbar.is-primary .navbar-dropdown a.navbar-item.is-active,html.theme--documenter-dark .docstring>section>a.navbar.docs-sourcelink .navbar-dropdown a.navbar-item.is-active{background-color:#1abc9c}@media screen and (max-width: 1055px){html.theme--documenter-dark .navbar .navbar-menu{background-color:#375a7f;border-radius:0 0 .4em .4em}}html.theme--documenter-dark .hero .navbar,html.theme--documenter-dark body>.navbar{border-radius:0}html.theme--documenter-dark .pagination-link,html.theme--documenter-dark .pagination-next,html.theme--documenter-dark .pagination-previous{border-width:1px}html.theme--documenter-dark .panel-block,html.theme--documenter-dark .panel-heading,html.theme--documenter-dark .panel-tabs{border-width:1px}html.theme--documenter-dark .panel-block:first-child,html.theme--documenter-dark .panel-heading:first-child,html.theme--documenter-dark .panel-tabs:first-child{border-top-width:1px}html.theme--documenter-dark .panel-heading{font-weight:700}html.theme--documenter-dark .panel-tabs a{border-width:1px;margin-bottom:-1px}html.theme--documenter-dark .panel-tabs a.is-active{border-bottom-color:#17a689}html.theme--documenter-dark .panel-block:hover{color:#1dd2af}html.theme--documenter-dark .panel-block:hover .panel-icon{color:#1dd2af}html.theme--documenter-dark .panel-block.is-active .panel-icon{color:#17a689}html.theme--documenter-dark .tabs a{border-bottom-width:1px;margin-bottom:-1px}html.theme--documenter-dark .tabs ul{border-bottom-width:1px}html.theme--documenter-dark .tabs.is-boxed a{border-width:1px}html.theme--documenter-dark .tabs.is-boxed li.is-active a{background-color:#1f2424}html.theme--documenter-dark .tabs.is-toggle li a{border-width:1px;margin-bottom:0}html.theme--documenter-dark .tabs.is-toggle li+li{margin-left:-1px}html.theme--documenter-dark .hero.is-white .navbar .navbar-dropdown 
.navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .hero.is-black .navbar .navbar-dropdown .navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .hero.is-light .navbar .navbar-dropdown .navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .hero.is-dark .navbar .navbar-dropdown .navbar-item:hover,html.theme--documenter-dark .content kbd.hero .navbar .navbar-dropdown .navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .hero.is-primary .navbar .navbar-dropdown .navbar-item:hover,html.theme--documenter-dark .docstring>section>a.hero.docs-sourcelink .navbar .navbar-dropdown .navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .hero.is-link .navbar .navbar-dropdown .navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .hero.is-info .navbar .navbar-dropdown .navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .hero.is-success .navbar .navbar-dropdown .navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .hero.is-warning .navbar .navbar-dropdown .navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark .hero.is-danger .navbar .navbar-dropdown .navbar-item:hover{background-color:rgba(0,0,0,0)}html.theme--documenter-dark h1 .docs-heading-anchor,html.theme--documenter-dark h1 .docs-heading-anchor:hover,html.theme--documenter-dark h1 .docs-heading-anchor:visited,html.theme--documenter-dark h2 .docs-heading-anchor,html.theme--documenter-dark h2 .docs-heading-anchor:hover,html.theme--documenter-dark h2 .docs-heading-anchor:visited,html.theme--documenter-dark h3 .docs-heading-anchor,html.theme--documenter-dark h3 .docs-heading-anchor:hover,html.theme--documenter-dark h3 .docs-heading-anchor:visited,html.theme--documenter-dark h4 .docs-heading-anchor,html.theme--documenter-dark h4 .docs-heading-anchor:hover,html.theme--documenter-dark h4 .docs-heading-anchor:visited,html.theme--documenter-dark h5 .docs-heading-anchor,html.theme--documenter-dark h5 .docs-heading-anchor:hover,html.theme--documenter-dark h5 .docs-heading-anchor:visited,html.theme--documenter-dark h6 .docs-heading-anchor,html.theme--documenter-dark h6 .docs-heading-anchor:hover,html.theme--documenter-dark h6 .docs-heading-anchor:visited{color:#f2f2f2}html.theme--documenter-dark h1 .docs-heading-anchor-permalink,html.theme--documenter-dark h2 .docs-heading-anchor-permalink,html.theme--documenter-dark h3 .docs-heading-anchor-permalink,html.theme--documenter-dark h4 .docs-heading-anchor-permalink,html.theme--documenter-dark h5 .docs-heading-anchor-permalink,html.theme--documenter-dark h6 .docs-heading-anchor-permalink{visibility:hidden;vertical-align:middle;margin-left:0.5em;font-size:0.7rem}html.theme--documenter-dark h1 .docs-heading-anchor-permalink::before,html.theme--documenter-dark h2 .docs-heading-anchor-permalink::before,html.theme--documenter-dark h3 .docs-heading-anchor-permalink::before,html.theme--documenter-dark h4 .docs-heading-anchor-permalink::before,html.theme--documenter-dark h5 .docs-heading-anchor-permalink::before,html.theme--documenter-dark h6 .docs-heading-anchor-permalink::before{font-family:"Font Awesome 6 Free";font-weight:900;content:"\f0c1"}html.theme--documenter-dark h1:hover .docs-heading-anchor-permalink,html.theme--documenter-dark h2:hover .docs-heading-anchor-permalink,html.theme--documenter-dark h3:hover .docs-heading-anchor-permalink,html.theme--documenter-dark h4:hover 
.docs-heading-anchor-permalink,html.theme--documenter-dark h5:hover .docs-heading-anchor-permalink,html.theme--documenter-dark h6:hover .docs-heading-anchor-permalink{visibility:visible}html.theme--documenter-dark .docs-light-only{display:none !important}html.theme--documenter-dark pre{position:relative;overflow:hidden}html.theme--documenter-dark pre code,html.theme--documenter-dark pre code.hljs{padding:0 .75rem !important;overflow:auto;display:block}html.theme--documenter-dark pre code:first-of-type,html.theme--documenter-dark pre code.hljs:first-of-type{padding-top:0.5rem !important}html.theme--documenter-dark pre code:last-of-type,html.theme--documenter-dark pre code.hljs:last-of-type{padding-bottom:0.5rem !important}html.theme--documenter-dark pre .copy-button{opacity:0.2;transition:opacity 0.2s;position:absolute;right:0em;top:0em;padding:0.5em;width:2.5em;height:2.5em;background:transparent;border:none;font-family:"Font Awesome 6 Free";color:#fff;cursor:pointer;text-align:center}html.theme--documenter-dark pre .copy-button:focus,html.theme--documenter-dark pre .copy-button:hover{opacity:1;background:rgba(255,255,255,0.1);color:#1abc9c}html.theme--documenter-dark pre .copy-button.success{color:#259a12;opacity:1}html.theme--documenter-dark pre .copy-button.error{color:#cb3c33;opacity:1}html.theme--documenter-dark pre:hover .copy-button{opacity:1}html.theme--documenter-dark .admonition{background-color:#282f2f;border-style:solid;border-width:1px;border-color:#5e6d6f;border-radius:.4em;font-size:1rem}html.theme--documenter-dark .admonition strong{color:currentColor}html.theme--documenter-dark .admonition.is-small,html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input.admonition{font-size:.75rem}html.theme--documenter-dark .admonition.is-medium{font-size:1.25rem}html.theme--documenter-dark .admonition.is-large{font-size:1.5rem}html.theme--documenter-dark .admonition.is-default{background-color:#282f2f;border-color:#5e6d6f}html.theme--documenter-dark .admonition.is-default>.admonition-header{background-color:#5e6d6f;color:#fff}html.theme--documenter-dark .admonition.is-default>.admonition-body{color:#fff}html.theme--documenter-dark .admonition.is-info{background-color:#282f2f;border-color:#024c7d}html.theme--documenter-dark .admonition.is-info>.admonition-header{background-color:#024c7d;color:#fff}html.theme--documenter-dark .admonition.is-info>.admonition-body{color:#fff}html.theme--documenter-dark .admonition.is-success{background-color:#282f2f;border-color:#008438}html.theme--documenter-dark .admonition.is-success>.admonition-header{background-color:#008438;color:#fff}html.theme--documenter-dark .admonition.is-success>.admonition-body{color:#fff}html.theme--documenter-dark .admonition.is-warning{background-color:#282f2f;border-color:#ad8100}html.theme--documenter-dark .admonition.is-warning>.admonition-header{background-color:#ad8100;color:#fff}html.theme--documenter-dark .admonition.is-warning>.admonition-body{color:#fff}html.theme--documenter-dark .admonition.is-danger{background-color:#282f2f;border-color:#9e1b0d}html.theme--documenter-dark .admonition.is-danger>.admonition-header{background-color:#9e1b0d;color:#fff}html.theme--documenter-dark .admonition.is-danger>.admonition-body{color:#fff}html.theme--documenter-dark .admonition.is-compat{background-color:#282f2f;border-color:#137886}html.theme--documenter-dark .admonition.is-compat>.admonition-header{background-color:#137886;color:#fff}html.theme--documenter-dark 
.admonition.is-compat>.admonition-body{color:#fff}html.theme--documenter-dark .admonition-header{color:#fff;background-color:#5e6d6f;align-items:center;font-weight:700;justify-content:space-between;line-height:1.25;padding:0.5rem .75rem;position:relative}html.theme--documenter-dark .admonition-header:before{font-family:"Font Awesome 6 Free";font-weight:900;margin-right:.75rem;content:"\f06a"}html.theme--documenter-dark details.admonition.is-details>.admonition-header{list-style:none}html.theme--documenter-dark details.admonition.is-details>.admonition-header:before{font-family:"Font Awesome 6 Free";font-weight:900;content:"\f055"}html.theme--documenter-dark details.admonition.is-details[open]>.admonition-header:before{font-family:"Font Awesome 6 Free";font-weight:900;content:"\f056"}html.theme--documenter-dark .admonition-body{color:#fff;padding:0.5rem .75rem}html.theme--documenter-dark .admonition-body pre{background-color:#282f2f}html.theme--documenter-dark .admonition-body code{background-color:rgba(255,255,255,0.05)}html.theme--documenter-dark .docstring{margin-bottom:1em;background-color:rgba(0,0,0,0);border:1px solid #5e6d6f;box-shadow:none;max-width:100%}html.theme--documenter-dark .docstring>header{cursor:pointer;display:flex;flex-grow:1;align-items:stretch;padding:0.5rem .75rem;background-color:#282f2f;box-shadow:0 0.125em 0.25em rgba(10,10,10,0.1);box-shadow:none;border-bottom:1px solid #5e6d6f;overflow:auto}html.theme--documenter-dark .docstring>header code{background-color:transparent}html.theme--documenter-dark .docstring>header .docstring-article-toggle-button{min-width:1.1rem;padding:0.2rem 0.2rem 0.2rem 0}html.theme--documenter-dark .docstring>header .docstring-binding{margin-right:0.3em}html.theme--documenter-dark .docstring>header .docstring-category{margin-left:0.3em}html.theme--documenter-dark .docstring>section{position:relative;padding:.75rem .75rem;border-bottom:1px solid #5e6d6f}html.theme--documenter-dark .docstring>section:last-child{border-bottom:none}html.theme--documenter-dark .docstring>section>a.docs-sourcelink{transition:opacity 0.3s;opacity:0;position:absolute;right:.375rem;bottom:.375rem}html.theme--documenter-dark .docstring>section>a.docs-sourcelink:focus{opacity:1 !important}html.theme--documenter-dark .docstring:hover>section>a.docs-sourcelink{opacity:0.2}html.theme--documenter-dark .docstring:focus-within>section>a.docs-sourcelink{opacity:0.2}html.theme--documenter-dark .docstring>section:hover a.docs-sourcelink{opacity:1}html.theme--documenter-dark .documenter-example-output{background-color:#1f2424}html.theme--documenter-dark .outdated-warning-overlay{position:fixed;top:0;left:0;right:0;box-shadow:0 0 10px rgba(0,0,0,0.3);z-index:999;background-color:#282f2f;color:#fff;border-bottom:3px solid #9e1b0d;padding:10px 35px;text-align:center;font-size:15px}html.theme--documenter-dark .outdated-warning-overlay .outdated-warning-closer{position:absolute;top:calc(50% - 10px);right:18px;cursor:pointer;width:12px}html.theme--documenter-dark .outdated-warning-overlay a{color:#1abc9c}html.theme--documenter-dark .outdated-warning-overlay a:hover{color:#1dd2af}html.theme--documenter-dark .content pre{border:1px solid #5e6d6f}html.theme--documenter-dark .content code{font-weight:inherit}html.theme--documenter-dark .content a code{color:#1abc9c}html.theme--documenter-dark .content h1 code,html.theme--documenter-dark .content h2 code,html.theme--documenter-dark .content h3 code,html.theme--documenter-dark .content h4 code,html.theme--documenter-dark .content h5 
code,html.theme--documenter-dark .content h6 code{color:#f2f2f2}html.theme--documenter-dark .content table{display:block;width:initial;max-width:100%;overflow-x:auto}html.theme--documenter-dark .content blockquote>ul:first-child,html.theme--documenter-dark .content blockquote>ol:first-child,html.theme--documenter-dark .content .admonition-body>ul:first-child,html.theme--documenter-dark .content .admonition-body>ol:first-child{margin-top:0}html.theme--documenter-dark pre,html.theme--documenter-dark code{font-variant-ligatures:no-contextual}html.theme--documenter-dark .breadcrumb a.is-disabled{cursor:default;pointer-events:none}html.theme--documenter-dark .breadcrumb a.is-disabled,html.theme--documenter-dark .breadcrumb a.is-disabled:hover{color:#f2f2f2}html.theme--documenter-dark .hljs{background:initial !important}html.theme--documenter-dark .katex .katex-mathml{top:0;right:0}html.theme--documenter-dark .katex-display,html.theme--documenter-dark mjx-container,html.theme--documenter-dark .MathJax_Display{margin:0.5em 0 !important}html.theme--documenter-dark html{-moz-osx-font-smoothing:auto;-webkit-font-smoothing:auto}html.theme--documenter-dark li.no-marker{list-style:none}html.theme--documenter-dark #documenter .docs-main>article{overflow-wrap:break-word}html.theme--documenter-dark #documenter .docs-main>article .math-container{overflow-x:auto;overflow-y:hidden}@media screen and (min-width: 1056px){html.theme--documenter-dark #documenter .docs-main{max-width:52rem;margin-left:20rem;padding-right:1rem}}@media screen and (max-width: 1055px){html.theme--documenter-dark #documenter .docs-main{width:100%}html.theme--documenter-dark #documenter .docs-main>article{max-width:52rem;margin-left:auto;margin-right:auto;margin-bottom:1rem;padding:0 1rem}html.theme--documenter-dark #documenter .docs-main>header,html.theme--documenter-dark #documenter .docs-main>nav{max-width:100%;width:100%;margin:0}}html.theme--documenter-dark #documenter .docs-main header.docs-navbar{background-color:#1f2424;border-bottom:1px solid #5e6d6f;z-index:2;min-height:4rem;margin-bottom:1rem;display:flex}html.theme--documenter-dark #documenter .docs-main header.docs-navbar .breadcrumb{flex-grow:1;overflow-x:hidden}html.theme--documenter-dark #documenter .docs-main header.docs-navbar .docs-sidebar-button{display:block;font-size:1.5rem;padding-bottom:0.1rem;margin-right:1rem}html.theme--documenter-dark #documenter .docs-main header.docs-navbar .docs-right{display:flex;white-space:nowrap;gap:1rem;align-items:center}html.theme--documenter-dark #documenter .docs-main header.docs-navbar .docs-right .docs-icon,html.theme--documenter-dark #documenter .docs-main header.docs-navbar .docs-right .docs-label{display:inline-block}html.theme--documenter-dark #documenter .docs-main header.docs-navbar .docs-right .docs-label{padding:0;margin-left:0.3em}@media screen and (max-width: 1055px){html.theme--documenter-dark #documenter .docs-main header.docs-navbar .docs-right .docs-navbar-link{margin-left:0.4rem;margin-right:0.4rem}}html.theme--documenter-dark #documenter .docs-main header.docs-navbar>*{margin:auto 0}@media screen and (max-width: 1055px){html.theme--documenter-dark #documenter .docs-main header.docs-navbar{position:sticky;top:0;padding:0 1rem;transition-property:top, box-shadow;-webkit-transition-property:top, box-shadow;transition-duration:0.3s;-webkit-transition-duration:0.3s}html.theme--documenter-dark #documenter .docs-main header.docs-navbar.headroom--not-top{box-shadow:.2rem 0rem .4rem 
#171717;transition-duration:0.7s;-webkit-transition-duration:0.7s}html.theme--documenter-dark #documenter .docs-main header.docs-navbar.headroom--unpinned.headroom--not-top.headroom--not-bottom{top:-4.5rem;transition-duration:0.7s;-webkit-transition-duration:0.7s}}html.theme--documenter-dark #documenter .docs-main section.footnotes{border-top:1px solid #5e6d6f}html.theme--documenter-dark #documenter .docs-main section.footnotes li .tag:first-child,html.theme--documenter-dark #documenter .docs-main section.footnotes li .docstring>section>a.docs-sourcelink:first-child,html.theme--documenter-dark #documenter .docs-main section.footnotes li .content kbd:first-child,html.theme--documenter-dark .content #documenter .docs-main section.footnotes li kbd:first-child{margin-right:1em;margin-bottom:0.4em}html.theme--documenter-dark #documenter .docs-main .docs-footer{display:flex;flex-wrap:wrap;margin-left:0;margin-right:0;border-top:1px solid #5e6d6f;padding-top:1rem;padding-bottom:1rem}@media screen and (max-width: 1055px){html.theme--documenter-dark #documenter .docs-main .docs-footer{padding-left:1rem;padding-right:1rem}}html.theme--documenter-dark #documenter .docs-main .docs-footer .docs-footer-nextpage,html.theme--documenter-dark #documenter .docs-main .docs-footer .docs-footer-prevpage{flex-grow:1}html.theme--documenter-dark #documenter .docs-main .docs-footer .docs-footer-nextpage{text-align:right}html.theme--documenter-dark #documenter .docs-main .docs-footer .flexbox-break{flex-basis:100%;height:0}html.theme--documenter-dark #documenter .docs-main .docs-footer .footer-message{font-size:0.8em;margin:0.5em auto 0 auto;text-align:center}html.theme--documenter-dark #documenter .docs-sidebar{display:flex;flex-direction:column;color:#fff;background-color:#282f2f;border-right:1px solid #5e6d6f;padding:0;flex:0 0 18rem;z-index:5;font-size:1rem;position:fixed;left:-18rem;width:18rem;height:100%;transition:left 0.3s}html.theme--documenter-dark #documenter .docs-sidebar.visible{left:0;box-shadow:.4rem 0rem .8rem #171717}@media screen and (min-width: 1056px){html.theme--documenter-dark #documenter .docs-sidebar.visible{box-shadow:none}}@media screen and (min-width: 1056px){html.theme--documenter-dark #documenter .docs-sidebar{left:0;top:0}}html.theme--documenter-dark #documenter .docs-sidebar .docs-logo{margin-top:1rem;padding:0 1rem}html.theme--documenter-dark #documenter .docs-sidebar .docs-logo>img{max-height:6rem;margin:auto}html.theme--documenter-dark #documenter .docs-sidebar .docs-package-name{flex-shrink:0;font-size:1.5rem;font-weight:700;text-align:center;white-space:nowrap;overflow:hidden;padding:0.5rem 0}html.theme--documenter-dark #documenter .docs-sidebar .docs-package-name .docs-autofit{max-width:16.2rem}html.theme--documenter-dark #documenter .docs-sidebar .docs-package-name a,html.theme--documenter-dark #documenter .docs-sidebar .docs-package-name a:hover{color:#fff}html.theme--documenter-dark #documenter .docs-sidebar .docs-version-selector{border-top:1px solid #5e6d6f;display:none;padding:0.5rem}html.theme--documenter-dark #documenter .docs-sidebar .docs-version-selector.visible{display:flex}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu{flex-grow:1;user-select:none;border-top:1px solid #5e6d6f;padding-bottom:1.5rem}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu>li>.tocitem{font-weight:bold}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu>li li{font-size:.95rem;margin-left:1em;border-left:1px solid 
#5e6d6f}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu input.collapse-toggle{display:none}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu ul.collapsed{display:none}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu input:checked~ul.collapsed{display:block}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu label.tocitem{display:flex}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu label.tocitem .docs-label{flex-grow:2}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu label.tocitem .docs-chevron{display:inline-block;font-style:normal;font-variant:normal;text-rendering:auto;line-height:1;font-size:.75rem;margin-left:1rem;margin-top:auto;margin-bottom:auto}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu label.tocitem .docs-chevron::before{font-family:"Font Awesome 6 Free";font-weight:900;content:"\f054"}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu input:checked~label.tocitem .docs-chevron::before{content:"\f078"}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu .tocitem{display:block;padding:0.5rem 0.5rem}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu .tocitem,html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu .tocitem:hover{color:#fff;background:#282f2f}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu a.tocitem:hover,html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu label.tocitem:hover{color:#fff;background-color:#32393a}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu li.is-active{border-top:1px solid #5e6d6f;border-bottom:1px solid #5e6d6f;background-color:#1f2424}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu li.is-active .tocitem,html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu li.is-active .tocitem:hover{background-color:#1f2424;color:#fff}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu li.is-active ul.internal .tocitem:hover{background-color:#32393a;color:#fff}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu>li.is-active:first-child{border-top:none}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu ul.internal{margin:0 0.5rem 0.5rem;border-top:1px solid #5e6d6f}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu ul.internal li{font-size:.85rem;border-left:none;margin-left:0;margin-top:0.5rem}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu ul.internal .tocitem{width:100%;padding:0}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu ul.internal .tocitem::before{content:"⚬";margin-right:0.4em}html.theme--documenter-dark #documenter .docs-sidebar form.docs-search{margin:auto;margin-top:0.5rem;margin-bottom:0.5rem}html.theme--documenter-dark #documenter .docs-sidebar form.docs-search>input{width:14.4rem}html.theme--documenter-dark #documenter .docs-sidebar #documenter-search-query{color:#868c98;width:14.4rem;box-shadow:inset 0 1px 2px rgba(10,10,10,0.1)}@media screen and (min-width: 1056px){html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu{overflow-y:auto;-webkit-overflow-scroll:touch}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu::-webkit-scrollbar{width:.3rem;background:none}html.theme--documenter-dark #documenter .docs-sidebar ul.docs-menu::-webkit-scrollbar-thumb{border-radius:5px 0px 0px 5px;background:#3b4445}html.theme--documenter-dark #documenter .docs-sidebar 
ul.docs-menu::-webkit-scrollbar-thumb:hover{background:#4e5a5c}}@media screen and (max-width: 1055px){html.theme--documenter-dark #documenter .docs-sidebar{overflow-y:auto;-webkit-overflow-scroll:touch}html.theme--documenter-dark #documenter .docs-sidebar::-webkit-scrollbar{width:.3rem;background:none}html.theme--documenter-dark #documenter .docs-sidebar::-webkit-scrollbar-thumb{border-radius:5px 0px 0px 5px;background:#3b4445}html.theme--documenter-dark #documenter .docs-sidebar::-webkit-scrollbar-thumb:hover{background:#4e5a5c}}html.theme--documenter-dark kbd.search-modal-key-hints{border-radius:0.25rem;border:1px solid rgba(245,245,245,0.6);box-shadow:0 2px 0 1px rgba(245,245,245,0.6);cursor:default;font-size:0.9rem;line-height:1.5;min-width:0.75rem;text-align:center;padding:0.1rem 0.3rem;position:relative;top:-1px}html.theme--documenter-dark .search-min-width-50{min-width:50%}html.theme--documenter-dark .search-min-height-100{min-height:100%}html.theme--documenter-dark .search-modal-card-body{max-height:calc(100vh - 15rem)}html.theme--documenter-dark .search-result-link{border-radius:0.7em;transition:all 300ms}html.theme--documenter-dark .search-result-link:hover,html.theme--documenter-dark .search-result-link:focus{background-color:rgba(0,128,128,0.1)}html.theme--documenter-dark .search-result-link .property-search-result-badge,html.theme--documenter-dark .search-result-link .search-filter{transition:all 300ms}html.theme--documenter-dark .property-search-result-badge,html.theme--documenter-dark .search-filter{padding:0.15em 0.5em;font-size:0.8em;font-style:italic;text-transform:none !important;line-height:1.5;color:#f5f5f5;background-color:rgba(51,65,85,0.501961);border-radius:0.6rem}html.theme--documenter-dark .search-result-link:hover .property-search-result-badge,html.theme--documenter-dark .search-result-link:hover .search-filter,html.theme--documenter-dark .search-result-link:focus .property-search-result-badge,html.theme--documenter-dark .search-result-link:focus .search-filter{color:#333;background-color:#f1f5f9}html.theme--documenter-dark .search-filter{color:#333;background-color:#f5f5f5;transition:all 300ms}html.theme--documenter-dark .search-filter:hover,html.theme--documenter-dark .search-filter:focus{color:#333}html.theme--documenter-dark .search-filter-selected{color:#f5f5f5;background-color:rgba(139,0,139,0.5)}html.theme--documenter-dark .search-filter-selected:hover,html.theme--documenter-dark .search-filter-selected:focus{color:#f5f5f5}html.theme--documenter-dark .search-result-highlight{background-color:#ffdd57;color:black}html.theme--documenter-dark .search-divider{border-bottom:1px solid #5e6d6f}html.theme--documenter-dark .search-result-title{width:85%;color:#f5f5f5}html.theme--documenter-dark .search-result-code-title{font-size:0.875rem;font-family:"JuliaMono","SFMono-Regular","Menlo","Consolas","Liberation Mono","DejaVu Sans Mono",monospace}html.theme--documenter-dark #search-modal .modal-card-body::-webkit-scrollbar,html.theme--documenter-dark #search-modal .filter-tabs::-webkit-scrollbar{height:10px;width:10px;background-color:transparent}html.theme--documenter-dark #search-modal .modal-card-body::-webkit-scrollbar-thumb,html.theme--documenter-dark #search-modal .filter-tabs::-webkit-scrollbar-thumb{background-color:gray;border-radius:1rem}html.theme--documenter-dark #search-modal .modal-card-body::-webkit-scrollbar-track,html.theme--documenter-dark #search-modal .filter-tabs::-webkit-scrollbar-track{-webkit-box-shadow:inset 0 0 6px 
rgba(0,0,0,0.6);background-color:transparent}html.theme--documenter-dark .w-100{width:100%}html.theme--documenter-dark .gap-2{gap:0.5rem}html.theme--documenter-dark .gap-4{gap:1rem}html.theme--documenter-dark .gap-8{gap:2rem}html.theme--documenter-dark{background-color:#1f2424;font-size:16px;min-width:300px;overflow-x:auto;overflow-y:scroll;text-rendering:optimizeLegibility;text-size-adjust:100%}html.theme--documenter-dark .ansi span.sgr1{font-weight:bolder}html.theme--documenter-dark .ansi span.sgr2{font-weight:lighter}html.theme--documenter-dark .ansi span.sgr3{font-style:italic}html.theme--documenter-dark .ansi span.sgr4{text-decoration:underline}html.theme--documenter-dark .ansi span.sgr7{color:#1f2424;background-color:#fff}html.theme--documenter-dark .ansi span.sgr8{color:transparent}html.theme--documenter-dark .ansi span.sgr8 span{color:transparent}html.theme--documenter-dark .ansi span.sgr9{text-decoration:line-through}html.theme--documenter-dark .ansi span.sgr30{color:#242424}html.theme--documenter-dark .ansi span.sgr31{color:#f6705f}html.theme--documenter-dark .ansi span.sgr32{color:#4fb43a}html.theme--documenter-dark .ansi span.sgr33{color:#f4c72f}html.theme--documenter-dark .ansi span.sgr34{color:#7587f0}html.theme--documenter-dark .ansi span.sgr35{color:#bc89d3}html.theme--documenter-dark .ansi span.sgr36{color:#49b6ca}html.theme--documenter-dark .ansi span.sgr37{color:#b3bdbe}html.theme--documenter-dark .ansi span.sgr40{background-color:#242424}html.theme--documenter-dark .ansi span.sgr41{background-color:#f6705f}html.theme--documenter-dark .ansi span.sgr42{background-color:#4fb43a}html.theme--documenter-dark .ansi span.sgr43{background-color:#f4c72f}html.theme--documenter-dark .ansi span.sgr44{background-color:#7587f0}html.theme--documenter-dark .ansi span.sgr45{background-color:#bc89d3}html.theme--documenter-dark .ansi span.sgr46{background-color:#49b6ca}html.theme--documenter-dark .ansi span.sgr47{background-color:#b3bdbe}html.theme--documenter-dark .ansi span.sgr90{color:#92a0a2}html.theme--documenter-dark .ansi span.sgr91{color:#ff8674}html.theme--documenter-dark .ansi span.sgr92{color:#79d462}html.theme--documenter-dark .ansi span.sgr93{color:#ffe76b}html.theme--documenter-dark .ansi span.sgr94{color:#8a98ff}html.theme--documenter-dark .ansi span.sgr95{color:#d2a4e6}html.theme--documenter-dark .ansi span.sgr96{color:#6bc8db}html.theme--documenter-dark .ansi span.sgr97{color:#ecf0f1}html.theme--documenter-dark .ansi span.sgr100{background-color:#92a0a2}html.theme--documenter-dark .ansi span.sgr101{background-color:#ff8674}html.theme--documenter-dark .ansi span.sgr102{background-color:#79d462}html.theme--documenter-dark .ansi span.sgr103{background-color:#ffe76b}html.theme--documenter-dark .ansi span.sgr104{background-color:#8a98ff}html.theme--documenter-dark .ansi span.sgr105{background-color:#d2a4e6}html.theme--documenter-dark .ansi span.sgr106{background-color:#6bc8db}html.theme--documenter-dark .ansi span.sgr107{background-color:#ecf0f1}html.theme--documenter-dark code.language-julia-repl>span.hljs-meta{color:#4fb43a;font-weight:bolder}html.theme--documenter-dark .hljs{background:#2b2b2b;color:#f8f8f2}html.theme--documenter-dark .hljs-comment,html.theme--documenter-dark .hljs-quote{color:#d4d0ab}html.theme--documenter-dark .hljs-variable,html.theme--documenter-dark .hljs-template-variable,html.theme--documenter-dark .hljs-tag,html.theme--documenter-dark .hljs-name,html.theme--documenter-dark .hljs-selector-id,html.theme--documenter-dark 
.hljs-selector-class,html.theme--documenter-dark .hljs-regexp,html.theme--documenter-dark .hljs-deletion{color:#ffa07a}html.theme--documenter-dark .hljs-number,html.theme--documenter-dark .hljs-built_in,html.theme--documenter-dark .hljs-literal,html.theme--documenter-dark .hljs-type,html.theme--documenter-dark .hljs-params,html.theme--documenter-dark .hljs-meta,html.theme--documenter-dark .hljs-link{color:#f5ab35}html.theme--documenter-dark .hljs-attribute{color:#ffd700}html.theme--documenter-dark .hljs-string,html.theme--documenter-dark .hljs-symbol,html.theme--documenter-dark .hljs-bullet,html.theme--documenter-dark .hljs-addition{color:#abe338}html.theme--documenter-dark .hljs-title,html.theme--documenter-dark .hljs-section{color:#00e0e0}html.theme--documenter-dark .hljs-keyword,html.theme--documenter-dark .hljs-selector-tag{color:#dcc6e0}html.theme--documenter-dark .hljs-emphasis{font-style:italic}html.theme--documenter-dark .hljs-strong{font-weight:bold}@media screen and (-ms-high-contrast: active){html.theme--documenter-dark .hljs-addition,html.theme--documenter-dark .hljs-attribute,html.theme--documenter-dark .hljs-built_in,html.theme--documenter-dark .hljs-bullet,html.theme--documenter-dark .hljs-comment,html.theme--documenter-dark .hljs-link,html.theme--documenter-dark .hljs-literal,html.theme--documenter-dark .hljs-meta,html.theme--documenter-dark .hljs-number,html.theme--documenter-dark .hljs-params,html.theme--documenter-dark .hljs-string,html.theme--documenter-dark .hljs-symbol,html.theme--documenter-dark .hljs-type,html.theme--documenter-dark .hljs-quote{color:highlight}html.theme--documenter-dark .hljs-keyword,html.theme--documenter-dark .hljs-selector-tag{font-weight:bold}}html.theme--documenter-dark .hljs-subst{color:#f8f8f2}html.theme--documenter-dark .search-result-link{border-radius:0.7em;transition:all 300ms}html.theme--documenter-dark .search-result-link:hover,html.theme--documenter-dark .search-result-link:focus{background-color:rgba(0,128,128,0.1)}html.theme--documenter-dark .search-result-link .property-search-result-badge,html.theme--documenter-dark .search-result-link .search-filter{transition:all 300ms}html.theme--documenter-dark .search-result-link:hover .property-search-result-badge,html.theme--documenter-dark .search-result-link:hover .search-filter,html.theme--documenter-dark .search-result-link:focus .property-search-result-badge,html.theme--documenter-dark .search-result-link:focus .search-filter{color:#333 !important;background-color:#f1f5f9 !important}html.theme--documenter-dark .search-result-title{color:whitesmoke}html.theme--documenter-dark .search-result-highlight{background-color:greenyellow;color:black}html.theme--documenter-dark .search-divider{border-bottom:1px solid #5e6d6f50}html.theme--documenter-dark .w-100{width:100%}html.theme--documenter-dark .gap-2{gap:0.5rem}html.theme--documenter-dark .gap-4{gap:1rem} diff --git a/v0.20.3/assets/themes/documenter-light.css b/v0.20.3/assets/themes/documenter-light.css new file mode 100644 index 000000000..2f168c77b --- /dev/null +++ b/v0.20.3/assets/themes/documenter-light.css @@ -0,0 +1,9 @@ +.pagination-previous,.pagination-next,.pagination-link,.pagination-ellipsis,.file-cta,.file-name,.select select,.textarea,.input,#documenter .docs-sidebar form.docs-search>input,.button{-moz-appearance:none;-webkit-appearance:none;align-items:center;border:1px solid 
transparent;border-radius:4px;box-shadow:none;display:inline-flex;font-size:1rem;height:2.5em;justify-content:flex-start;line-height:1.5;padding-bottom:calc(0.5em - 1px);padding-left:calc(0.75em - 1px);padding-right:calc(0.75em - 1px);padding-top:calc(0.5em - 1px);position:relative;vertical-align:top}.pagination-previous:focus,.pagination-next:focus,.pagination-link:focus,.pagination-ellipsis:focus,.file-cta:focus,.file-name:focus,.select select:focus,.textarea:focus,.input:focus,#documenter .docs-sidebar form.docs-search>input:focus,.button:focus,.is-focused.pagination-previous,.is-focused.pagination-next,.is-focused.pagination-link,.is-focused.pagination-ellipsis,.is-focused.file-cta,.is-focused.file-name,.select select.is-focused,.is-focused.textarea,.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.is-focused.button,.pagination-previous:active,.pagination-next:active,.pagination-link:active,.pagination-ellipsis:active,.file-cta:active,.file-name:active,.select select:active,.textarea:active,.input:active,#documenter .docs-sidebar form.docs-search>input:active,.button:active,.is-active.pagination-previous,.is-active.pagination-next,.is-active.pagination-link,.is-active.pagination-ellipsis,.is-active.file-cta,.is-active.file-name,.select select.is-active,.is-active.textarea,.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active,.is-active.button{outline:none}.pagination-previous[disabled],.pagination-next[disabled],.pagination-link[disabled],.pagination-ellipsis[disabled],.file-cta[disabled],.file-name[disabled],.select select[disabled],.textarea[disabled],.input[disabled],#documenter .docs-sidebar form.docs-search>input[disabled],.button[disabled],fieldset[disabled] .pagination-previous,fieldset[disabled] .pagination-next,fieldset[disabled] .pagination-link,fieldset[disabled] .pagination-ellipsis,fieldset[disabled] .file-cta,fieldset[disabled] .file-name,fieldset[disabled] .select select,.select fieldset[disabled] select,fieldset[disabled] .textarea,fieldset[disabled] .input,fieldset[disabled] #documenter .docs-sidebar form.docs-search>input,#documenter .docs-sidebar fieldset[disabled] form.docs-search>input,fieldset[disabled] .button{cursor:not-allowed}.tabs,.pagination-previous,.pagination-next,.pagination-link,.pagination-ellipsis,.breadcrumb,.file,.button,.is-unselectable{-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.navbar-link:not(.is-arrowless)::after,.select:not(.is-multiple):not(.is-loading)::after{border:3px solid rgba(0,0,0,0);border-radius:2px;border-right:0;border-top:0;content:" 
";display:block;height:0.625em;margin-top:-0.4375em;pointer-events:none;position:absolute;top:50%;transform:rotate(-45deg);transform-origin:center;width:0.625em}.admonition:not(:last-child),.tabs:not(:last-child),.pagination:not(:last-child),.message:not(:last-child),.level:not(:last-child),.breadcrumb:not(:last-child),.block:not(:last-child),.title:not(:last-child),.subtitle:not(:last-child),.table-container:not(:last-child),.table:not(:last-child),.progress:not(:last-child),.notification:not(:last-child),.content:not(:last-child),.box:not(:last-child){margin-bottom:1.5rem}.modal-close,.delete{-webkit-touch-callout:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-moz-appearance:none;-webkit-appearance:none;background-color:rgba(10,10,10,0.2);border:none;border-radius:9999px;cursor:pointer;pointer-events:auto;display:inline-block;flex-grow:0;flex-shrink:0;font-size:0;height:20px;max-height:20px;max-width:20px;min-height:20px;min-width:20px;outline:none;position:relative;vertical-align:top;width:20px}.modal-close::before,.delete::before,.modal-close::after,.delete::after{background-color:#fff;content:"";display:block;left:50%;position:absolute;top:50%;transform:translateX(-50%) translateY(-50%) rotate(45deg);transform-origin:center center}.modal-close::before,.delete::before{height:2px;width:50%}.modal-close::after,.delete::after{height:50%;width:2px}.modal-close:hover,.delete:hover,.modal-close:focus,.delete:focus{background-color:rgba(10,10,10,0.3)}.modal-close:active,.delete:active{background-color:rgba(10,10,10,0.4)}.is-small.modal-close,#documenter .docs-sidebar form.docs-search>input.modal-close,.is-small.delete,#documenter .docs-sidebar form.docs-search>input.delete{height:16px;max-height:16px;max-width:16px;min-height:16px;min-width:16px;width:16px}.is-medium.modal-close,.is-medium.delete{height:24px;max-height:24px;max-width:24px;min-height:24px;min-width:24px;width:24px}.is-large.modal-close,.is-large.delete{height:32px;max-height:32px;max-width:32px;min-height:32px;min-width:32px;width:32px}.control.is-loading::after,.select.is-loading::after,.loader,.button.is-loading::after{animation:spinAround 500ms infinite linear;border:2px solid #dbdbdb;border-radius:9999px;border-right-color:transparent;border-top-color:transparent;content:"";display:block;height:1em;position:relative;width:1em}.hero-video,.modal-background,.modal,.image.is-square img,#documenter .docs-sidebar .docs-logo>img.is-square img,.image.is-square .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-square .has-ratio,.image.is-1by1 img,#documenter .docs-sidebar .docs-logo>img.is-1by1 img,.image.is-1by1 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-1by1 .has-ratio,.image.is-5by4 img,#documenter .docs-sidebar .docs-logo>img.is-5by4 img,.image.is-5by4 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-5by4 .has-ratio,.image.is-4by3 img,#documenter .docs-sidebar .docs-logo>img.is-4by3 img,.image.is-4by3 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-4by3 .has-ratio,.image.is-3by2 img,#documenter .docs-sidebar .docs-logo>img.is-3by2 img,.image.is-3by2 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-3by2 .has-ratio,.image.is-5by3 img,#documenter .docs-sidebar .docs-logo>img.is-5by3 img,.image.is-5by3 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-5by3 .has-ratio,.image.is-16by9 img,#documenter .docs-sidebar .docs-logo>img.is-16by9 img,.image.is-16by9 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-16by9 .has-ratio,.image.is-2by1 
img,#documenter .docs-sidebar .docs-logo>img.is-2by1 img,.image.is-2by1 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-2by1 .has-ratio,.image.is-3by1 img,#documenter .docs-sidebar .docs-logo>img.is-3by1 img,.image.is-3by1 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-3by1 .has-ratio,.image.is-4by5 img,#documenter .docs-sidebar .docs-logo>img.is-4by5 img,.image.is-4by5 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-4by5 .has-ratio,.image.is-3by4 img,#documenter .docs-sidebar .docs-logo>img.is-3by4 img,.image.is-3by4 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-3by4 .has-ratio,.image.is-2by3 img,#documenter .docs-sidebar .docs-logo>img.is-2by3 img,.image.is-2by3 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-2by3 .has-ratio,.image.is-3by5 img,#documenter .docs-sidebar .docs-logo>img.is-3by5 img,.image.is-3by5 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-3by5 .has-ratio,.image.is-9by16 img,#documenter .docs-sidebar .docs-logo>img.is-9by16 img,.image.is-9by16 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-9by16 .has-ratio,.image.is-1by2 img,#documenter .docs-sidebar .docs-logo>img.is-1by2 img,.image.is-1by2 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-1by2 .has-ratio,.image.is-1by3 img,#documenter .docs-sidebar .docs-logo>img.is-1by3 img,.image.is-1by3 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-1by3 .has-ratio,.is-overlay{bottom:0;left:0;position:absolute;right:0;top:0}.navbar-burger{-moz-appearance:none;-webkit-appearance:none;appearance:none;background:none;border:none;color:currentColor;font-family:inherit;font-size:1em;margin:0;padding:0}.has-text-white{color:#fff !important}a.has-text-white:hover,a.has-text-white:focus{color:#e6e6e6 !important}.has-background-white{background-color:#fff !important}.has-text-black{color:#0a0a0a !important}a.has-text-black:hover,a.has-text-black:focus{color:#000 !important}.has-background-black{background-color:#0a0a0a !important}.has-text-light{color:#f5f5f5 !important}a.has-text-light:hover,a.has-text-light:focus{color:#dbdbdb !important}.has-background-light{background-color:#f5f5f5 !important}.has-text-dark{color:#363636 !important}a.has-text-dark:hover,a.has-text-dark:focus{color:#1c1c1c !important}.has-background-dark{background-color:#363636 !important}.has-text-primary{color:#4eb5de !important}a.has-text-primary:hover,a.has-text-primary:focus{color:#27a1d2 !important}.has-background-primary{background-color:#4eb5de !important}.has-text-primary-light{color:#eef8fc !important}a.has-text-primary-light:hover,a.has-text-primary-light:focus{color:#c3e6f4 !important}.has-background-primary-light{background-color:#eef8fc !important}.has-text-primary-dark{color:#1a6d8e !important}a.has-text-primary-dark:hover,a.has-text-primary-dark:focus{color:#228eb9 !important}.has-background-primary-dark{background-color:#1a6d8e !important}.has-text-link{color:#2e63b8 !important}a.has-text-link:hover,a.has-text-link:focus{color:#244d8f !important}.has-background-link{background-color:#2e63b8 !important}.has-text-link-light{color:#eff3fb !important}a.has-text-link-light:hover,a.has-text-link-light:focus{color:#c6d6f1 !important}.has-background-link-light{background-color:#eff3fb !important}.has-text-link-dark{color:#3169c4 !important}a.has-text-link-dark:hover,a.has-text-link-dark:focus{color:#5485d4 !important}.has-background-link-dark{background-color:#3169c4 !important}.has-text-info{color:#209cee !important}a.has-text-info:hover,a.has-text-info:focus{color:#1081cb 
!important}.has-background-info{background-color:#209cee !important}.has-text-info-light{color:#ecf7fe !important}a.has-text-info-light:hover,a.has-text-info-light:focus{color:#bde2fa !important}.has-background-info-light{background-color:#ecf7fe !important}.has-text-info-dark{color:#0e72b4 !important}a.has-text-info-dark:hover,a.has-text-info-dark:focus{color:#1190e3 !important}.has-background-info-dark{background-color:#0e72b4 !important}.has-text-success{color:#22c35b !important}a.has-text-success:hover,a.has-text-success:focus{color:#1a9847 !important}.has-background-success{background-color:#22c35b !important}.has-text-success-light{color:#eefcf3 !important}a.has-text-success-light:hover,a.has-text-success-light:focus{color:#c2f4d4 !important}.has-background-success-light{background-color:#eefcf3 !important}.has-text-success-dark{color:#198f43 !important}a.has-text-success-dark:hover,a.has-text-success-dark:focus{color:#21bb57 !important}.has-background-success-dark{background-color:#198f43 !important}.has-text-warning{color:#ffdd57 !important}a.has-text-warning:hover,a.has-text-warning:focus{color:#ffd324 !important}.has-background-warning{background-color:#ffdd57 !important}.has-text-warning-light{color:#fffbeb !important}a.has-text-warning-light:hover,a.has-text-warning-light:focus{color:#fff1b8 !important}.has-background-warning-light{background-color:#fffbeb !important}.has-text-warning-dark{color:#947600 !important}a.has-text-warning-dark:hover,a.has-text-warning-dark:focus{color:#c79f00 !important}.has-background-warning-dark{background-color:#947600 !important}.has-text-danger{color:#da0b00 !important}a.has-text-danger:hover,a.has-text-danger:focus{color:#a70800 !important}.has-background-danger{background-color:#da0b00 !important}.has-text-danger-light{color:#ffeceb !important}a.has-text-danger-light:hover,a.has-text-danger-light:focus{color:#ffbbb8 !important}.has-background-danger-light{background-color:#ffeceb !important}.has-text-danger-dark{color:#f50c00 !important}a.has-text-danger-dark:hover,a.has-text-danger-dark:focus{color:#ff3429 !important}.has-background-danger-dark{background-color:#f50c00 !important}.has-text-black-bis{color:#121212 !important}.has-background-black-bis{background-color:#121212 !important}.has-text-black-ter{color:#242424 !important}.has-background-black-ter{background-color:#242424 !important}.has-text-grey-darker{color:#363636 !important}.has-background-grey-darker{background-color:#363636 !important}.has-text-grey-dark{color:#4a4a4a !important}.has-background-grey-dark{background-color:#4a4a4a !important}.has-text-grey{color:#6b6b6b !important}.has-background-grey{background-color:#6b6b6b !important}.has-text-grey-light{color:#b5b5b5 !important}.has-background-grey-light{background-color:#b5b5b5 !important}.has-text-grey-lighter{color:#dbdbdb !important}.has-background-grey-lighter{background-color:#dbdbdb !important}.has-text-white-ter{color:#f5f5f5 !important}.has-background-white-ter{background-color:#f5f5f5 !important}.has-text-white-bis{color:#fafafa !important}.has-background-white-bis{background-color:#fafafa !important}.is-flex-direction-row{flex-direction:row !important}.is-flex-direction-row-reverse{flex-direction:row-reverse !important}.is-flex-direction-column{flex-direction:column !important}.is-flex-direction-column-reverse{flex-direction:column-reverse !important}.is-flex-wrap-nowrap{flex-wrap:nowrap !important}.is-flex-wrap-wrap{flex-wrap:wrap !important}.is-flex-wrap-wrap-reverse{flex-wrap:wrap-reverse 
!important}.is-justify-content-flex-start{justify-content:flex-start !important}.is-justify-content-flex-end{justify-content:flex-end !important}.is-justify-content-center{justify-content:center !important}.is-justify-content-space-between{justify-content:space-between !important}.is-justify-content-space-around{justify-content:space-around !important}.is-justify-content-space-evenly{justify-content:space-evenly !important}.is-justify-content-start{justify-content:start !important}.is-justify-content-end{justify-content:end !important}.is-justify-content-left{justify-content:left !important}.is-justify-content-right{justify-content:right !important}.is-align-content-flex-start{align-content:flex-start !important}.is-align-content-flex-end{align-content:flex-end !important}.is-align-content-center{align-content:center !important}.is-align-content-space-between{align-content:space-between !important}.is-align-content-space-around{align-content:space-around !important}.is-align-content-space-evenly{align-content:space-evenly !important}.is-align-content-stretch{align-content:stretch !important}.is-align-content-start{align-content:start !important}.is-align-content-end{align-content:end !important}.is-align-content-baseline{align-content:baseline !important}.is-align-items-stretch{align-items:stretch !important}.is-align-items-flex-start{align-items:flex-start !important}.is-align-items-flex-end{align-items:flex-end !important}.is-align-items-center{align-items:center !important}.is-align-items-baseline{align-items:baseline !important}.is-align-items-start{align-items:start !important}.is-align-items-end{align-items:end !important}.is-align-items-self-start{align-items:self-start !important}.is-align-items-self-end{align-items:self-end !important}.is-align-self-auto{align-self:auto !important}.is-align-self-flex-start{align-self:flex-start !important}.is-align-self-flex-end{align-self:flex-end !important}.is-align-self-center{align-self:center !important}.is-align-self-baseline{align-self:baseline !important}.is-align-self-stretch{align-self:stretch !important}.is-flex-grow-0{flex-grow:0 !important}.is-flex-grow-1{flex-grow:1 !important}.is-flex-grow-2{flex-grow:2 !important}.is-flex-grow-3{flex-grow:3 !important}.is-flex-grow-4{flex-grow:4 !important}.is-flex-grow-5{flex-grow:5 !important}.is-flex-shrink-0{flex-shrink:0 !important}.is-flex-shrink-1{flex-shrink:1 !important}.is-flex-shrink-2{flex-shrink:2 !important}.is-flex-shrink-3{flex-shrink:3 !important}.is-flex-shrink-4{flex-shrink:4 !important}.is-flex-shrink-5{flex-shrink:5 !important}.is-clearfix::after{clear:both;content:" ";display:table}.is-pulled-left{float:left !important}.is-pulled-right{float:right !important}.is-radiusless{border-radius:0 !important}.is-shadowless{box-shadow:none !important}.is-clickable{cursor:pointer !important;pointer-events:all !important}.is-clipped{overflow:hidden !important}.is-relative{position:relative !important}.is-marginless{margin:0 !important}.is-paddingless{padding:0 !important}.m-0{margin:0 !important}.mt-0{margin-top:0 !important}.mr-0{margin-right:0 !important}.mb-0{margin-bottom:0 !important}.ml-0{margin-left:0 !important}.mx-0{margin-left:0 !important;margin-right:0 !important}.my-0{margin-top:0 !important;margin-bottom:0 !important}.m-1{margin:.25rem !important}.mt-1{margin-top:.25rem !important}.mr-1{margin-right:.25rem !important}.mb-1{margin-bottom:.25rem !important}.ml-1{margin-left:.25rem !important}.mx-1{margin-left:.25rem !important;margin-right:.25rem 
!important}.my-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.m-2{margin:.5rem !important}.mt-2{margin-top:.5rem !important}.mr-2{margin-right:.5rem !important}.mb-2{margin-bottom:.5rem !important}.ml-2{margin-left:.5rem !important}.mx-2{margin-left:.5rem !important;margin-right:.5rem !important}.my-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.m-3{margin:.75rem !important}.mt-3{margin-top:.75rem !important}.mr-3{margin-right:.75rem !important}.mb-3{margin-bottom:.75rem !important}.ml-3{margin-left:.75rem !important}.mx-3{margin-left:.75rem !important;margin-right:.75rem !important}.my-3{margin-top:.75rem !important;margin-bottom:.75rem !important}.m-4{margin:1rem !important}.mt-4{margin-top:1rem !important}.mr-4{margin-right:1rem !important}.mb-4{margin-bottom:1rem !important}.ml-4{margin-left:1rem !important}.mx-4{margin-left:1rem !important;margin-right:1rem !important}.my-4{margin-top:1rem !important;margin-bottom:1rem !important}.m-5{margin:1.5rem !important}.mt-5{margin-top:1.5rem !important}.mr-5{margin-right:1.5rem !important}.mb-5{margin-bottom:1.5rem !important}.ml-5{margin-left:1.5rem !important}.mx-5{margin-left:1.5rem !important;margin-right:1.5rem !important}.my-5{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.m-6{margin:3rem !important}.mt-6{margin-top:3rem !important}.mr-6{margin-right:3rem !important}.mb-6{margin-bottom:3rem !important}.ml-6{margin-left:3rem !important}.mx-6{margin-left:3rem !important;margin-right:3rem !important}.my-6{margin-top:3rem !important;margin-bottom:3rem !important}.m-auto{margin:auto !important}.mt-auto{margin-top:auto !important}.mr-auto{margin-right:auto !important}.mb-auto{margin-bottom:auto !important}.ml-auto{margin-left:auto !important}.mx-auto{margin-left:auto !important;margin-right:auto !important}.my-auto{margin-top:auto !important;margin-bottom:auto !important}.p-0{padding:0 !important}.pt-0{padding-top:0 !important}.pr-0{padding-right:0 !important}.pb-0{padding-bottom:0 !important}.pl-0{padding-left:0 !important}.px-0{padding-left:0 !important;padding-right:0 !important}.py-0{padding-top:0 !important;padding-bottom:0 !important}.p-1{padding:.25rem !important}.pt-1{padding-top:.25rem !important}.pr-1{padding-right:.25rem !important}.pb-1{padding-bottom:.25rem !important}.pl-1{padding-left:.25rem !important}.px-1{padding-left:.25rem !important;padding-right:.25rem !important}.py-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.p-2{padding:.5rem !important}.pt-2{padding-top:.5rem !important}.pr-2{padding-right:.5rem !important}.pb-2{padding-bottom:.5rem !important}.pl-2{padding-left:.5rem !important}.px-2{padding-left:.5rem !important;padding-right:.5rem !important}.py-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.p-3{padding:.75rem !important}.pt-3{padding-top:.75rem !important}.pr-3{padding-right:.75rem !important}.pb-3{padding-bottom:.75rem !important}.pl-3{padding-left:.75rem !important}.px-3{padding-left:.75rem !important;padding-right:.75rem !important}.py-3{padding-top:.75rem !important;padding-bottom:.75rem !important}.p-4{padding:1rem !important}.pt-4{padding-top:1rem !important}.pr-4{padding-right:1rem !important}.pb-4{padding-bottom:1rem !important}.pl-4{padding-left:1rem !important}.px-4{padding-left:1rem !important;padding-right:1rem !important}.py-4{padding-top:1rem !important;padding-bottom:1rem !important}.p-5{padding:1.5rem !important}.pt-5{padding-top:1.5rem !important}.pr-5{padding-right:1.5rem !important}.pb-5{padding-bottom:1.5rem 
!important}.pl-5{padding-left:1.5rem !important}.px-5{padding-left:1.5rem !important;padding-right:1.5rem !important}.py-5{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.p-6{padding:3rem !important}.pt-6{padding-top:3rem !important}.pr-6{padding-right:3rem !important}.pb-6{padding-bottom:3rem !important}.pl-6{padding-left:3rem !important}.px-6{padding-left:3rem !important;padding-right:3rem !important}.py-6{padding-top:3rem !important;padding-bottom:3rem !important}.p-auto{padding:auto !important}.pt-auto{padding-top:auto !important}.pr-auto{padding-right:auto !important}.pb-auto{padding-bottom:auto !important}.pl-auto{padding-left:auto !important}.px-auto{padding-left:auto !important;padding-right:auto !important}.py-auto{padding-top:auto !important;padding-bottom:auto !important}.is-size-1{font-size:3rem !important}.is-size-2{font-size:2.5rem !important}.is-size-3{font-size:2rem !important}.is-size-4{font-size:1.5rem !important}.is-size-5{font-size:1.25rem !important}.is-size-6{font-size:1rem !important}.is-size-7,.docstring>section>a.docs-sourcelink{font-size:.75rem !important}@media screen and (max-width: 768px){.is-size-1-mobile{font-size:3rem !important}.is-size-2-mobile{font-size:2.5rem !important}.is-size-3-mobile{font-size:2rem !important}.is-size-4-mobile{font-size:1.5rem !important}.is-size-5-mobile{font-size:1.25rem !important}.is-size-6-mobile{font-size:1rem !important}.is-size-7-mobile{font-size:.75rem !important}}@media screen and (min-width: 769px),print{.is-size-1-tablet{font-size:3rem !important}.is-size-2-tablet{font-size:2.5rem !important}.is-size-3-tablet{font-size:2rem !important}.is-size-4-tablet{font-size:1.5rem !important}.is-size-5-tablet{font-size:1.25rem !important}.is-size-6-tablet{font-size:1rem !important}.is-size-7-tablet{font-size:.75rem !important}}@media screen and (max-width: 1055px){.is-size-1-touch{font-size:3rem !important}.is-size-2-touch{font-size:2.5rem !important}.is-size-3-touch{font-size:2rem !important}.is-size-4-touch{font-size:1.5rem !important}.is-size-5-touch{font-size:1.25rem !important}.is-size-6-touch{font-size:1rem !important}.is-size-7-touch{font-size:.75rem !important}}@media screen and (min-width: 1056px){.is-size-1-desktop{font-size:3rem !important}.is-size-2-desktop{font-size:2.5rem !important}.is-size-3-desktop{font-size:2rem !important}.is-size-4-desktop{font-size:1.5rem !important}.is-size-5-desktop{font-size:1.25rem !important}.is-size-6-desktop{font-size:1rem !important}.is-size-7-desktop{font-size:.75rem !important}}@media screen and (min-width: 1216px){.is-size-1-widescreen{font-size:3rem !important}.is-size-2-widescreen{font-size:2.5rem !important}.is-size-3-widescreen{font-size:2rem !important}.is-size-4-widescreen{font-size:1.5rem !important}.is-size-5-widescreen{font-size:1.25rem !important}.is-size-6-widescreen{font-size:1rem !important}.is-size-7-widescreen{font-size:.75rem !important}}@media screen and (min-width: 1408px){.is-size-1-fullhd{font-size:3rem !important}.is-size-2-fullhd{font-size:2.5rem !important}.is-size-3-fullhd{font-size:2rem !important}.is-size-4-fullhd{font-size:1.5rem !important}.is-size-5-fullhd{font-size:1.25rem !important}.is-size-6-fullhd{font-size:1rem !important}.is-size-7-fullhd{font-size:.75rem !important}}.has-text-centered{text-align:center !important}.has-text-justified{text-align:justify !important}.has-text-left{text-align:left !important}.has-text-right{text-align:right !important}@media screen and (max-width: 768px){.has-text-centered-mobile{text-align:center 
!important}}@media screen and (min-width: 769px),print{.has-text-centered-tablet{text-align:center !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.has-text-centered-tablet-only{text-align:center !important}}@media screen and (max-width: 1055px){.has-text-centered-touch{text-align:center !important}}@media screen and (min-width: 1056px){.has-text-centered-desktop{text-align:center !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.has-text-centered-desktop-only{text-align:center !important}}@media screen and (min-width: 1216px){.has-text-centered-widescreen{text-align:center !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.has-text-centered-widescreen-only{text-align:center !important}}@media screen and (min-width: 1408px){.has-text-centered-fullhd{text-align:center !important}}@media screen and (max-width: 768px){.has-text-justified-mobile{text-align:justify !important}}@media screen and (min-width: 769px),print{.has-text-justified-tablet{text-align:justify !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.has-text-justified-tablet-only{text-align:justify !important}}@media screen and (max-width: 1055px){.has-text-justified-touch{text-align:justify !important}}@media screen and (min-width: 1056px){.has-text-justified-desktop{text-align:justify !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.has-text-justified-desktop-only{text-align:justify !important}}@media screen and (min-width: 1216px){.has-text-justified-widescreen{text-align:justify !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.has-text-justified-widescreen-only{text-align:justify !important}}@media screen and (min-width: 1408px){.has-text-justified-fullhd{text-align:justify !important}}@media screen and (max-width: 768px){.has-text-left-mobile{text-align:left !important}}@media screen and (min-width: 769px),print{.has-text-left-tablet{text-align:left !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.has-text-left-tablet-only{text-align:left !important}}@media screen and (max-width: 1055px){.has-text-left-touch{text-align:left !important}}@media screen and (min-width: 1056px){.has-text-left-desktop{text-align:left !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.has-text-left-desktop-only{text-align:left !important}}@media screen and (min-width: 1216px){.has-text-left-widescreen{text-align:left !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.has-text-left-widescreen-only{text-align:left !important}}@media screen and (min-width: 1408px){.has-text-left-fullhd{text-align:left !important}}@media screen and (max-width: 768px){.has-text-right-mobile{text-align:right !important}}@media screen and (min-width: 769px),print{.has-text-right-tablet{text-align:right !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.has-text-right-tablet-only{text-align:right !important}}@media screen and (max-width: 1055px){.has-text-right-touch{text-align:right !important}}@media screen and (min-width: 1056px){.has-text-right-desktop{text-align:right !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.has-text-right-desktop-only{text-align:right !important}}@media screen and (min-width: 1216px){.has-text-right-widescreen{text-align:right !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.has-text-right-widescreen-only{text-align:right !important}}@media screen and 
(min-width: 1408px){.has-text-right-fullhd{text-align:right !important}}.is-capitalized{text-transform:capitalize !important}.is-lowercase{text-transform:lowercase !important}.is-uppercase{text-transform:uppercase !important}.is-italic{font-style:italic !important}.is-underlined{text-decoration:underline !important}.has-text-weight-light{font-weight:300 !important}.has-text-weight-normal{font-weight:400 !important}.has-text-weight-medium{font-weight:500 !important}.has-text-weight-semibold{font-weight:600 !important}.has-text-weight-bold{font-weight:700 !important}.is-family-primary{font-family:"Lato Medium",-apple-system,BlinkMacSystemFont,"Segoe UI","Helvetica Neue","Helvetica","Arial",sans-serif !important}.is-family-secondary{font-family:"Lato Medium",-apple-system,BlinkMacSystemFont,"Segoe UI","Helvetica Neue","Helvetica","Arial",sans-serif !important}.is-family-sans-serif{font-family:"Lato Medium",-apple-system,BlinkMacSystemFont,"Segoe UI","Helvetica Neue","Helvetica","Arial",sans-serif !important}.is-family-monospace{font-family:"JuliaMono","SFMono-Regular","Menlo","Consolas","Liberation Mono","DejaVu Sans Mono",monospace !important}.is-family-code{font-family:"JuliaMono","SFMono-Regular","Menlo","Consolas","Liberation Mono","DejaVu Sans Mono",monospace !important}.is-block{display:block !important}@media screen and (max-width: 768px){.is-block-mobile{display:block !important}}@media screen and (min-width: 769px),print{.is-block-tablet{display:block !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-block-tablet-only{display:block !important}}@media screen and (max-width: 1055px){.is-block-touch{display:block !important}}@media screen and (min-width: 1056px){.is-block-desktop{display:block !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-block-desktop-only{display:block !important}}@media screen and (min-width: 1216px){.is-block-widescreen{display:block !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-block-widescreen-only{display:block !important}}@media screen and (min-width: 1408px){.is-block-fullhd{display:block !important}}.is-flex{display:flex !important}@media screen and (max-width: 768px){.is-flex-mobile{display:flex !important}}@media screen and (min-width: 769px),print{.is-flex-tablet{display:flex !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-flex-tablet-only{display:flex !important}}@media screen and (max-width: 1055px){.is-flex-touch{display:flex !important}}@media screen and (min-width: 1056px){.is-flex-desktop{display:flex !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-flex-desktop-only{display:flex !important}}@media screen and (min-width: 1216px){.is-flex-widescreen{display:flex !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-flex-widescreen-only{display:flex !important}}@media screen and (min-width: 1408px){.is-flex-fullhd{display:flex !important}}.is-inline{display:inline !important}@media screen and (max-width: 768px){.is-inline-mobile{display:inline !important}}@media screen and (min-width: 769px),print{.is-inline-tablet{display:inline !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-inline-tablet-only{display:inline !important}}@media screen and (max-width: 1055px){.is-inline-touch{display:inline !important}}@media screen and (min-width: 1056px){.is-inline-desktop{display:inline !important}}@media screen and (min-width: 1056px) and (max-width: 
1215px){.is-inline-desktop-only{display:inline !important}}@media screen and (min-width: 1216px){.is-inline-widescreen{display:inline !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-inline-widescreen-only{display:inline !important}}@media screen and (min-width: 1408px){.is-inline-fullhd{display:inline !important}}.is-inline-block{display:inline-block !important}@media screen and (max-width: 768px){.is-inline-block-mobile{display:inline-block !important}}@media screen and (min-width: 769px),print{.is-inline-block-tablet{display:inline-block !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-inline-block-tablet-only{display:inline-block !important}}@media screen and (max-width: 1055px){.is-inline-block-touch{display:inline-block !important}}@media screen and (min-width: 1056px){.is-inline-block-desktop{display:inline-block !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-inline-block-desktop-only{display:inline-block !important}}@media screen and (min-width: 1216px){.is-inline-block-widescreen{display:inline-block !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-inline-block-widescreen-only{display:inline-block !important}}@media screen and (min-width: 1408px){.is-inline-block-fullhd{display:inline-block !important}}.is-inline-flex{display:inline-flex !important}@media screen and (max-width: 768px){.is-inline-flex-mobile{display:inline-flex !important}}@media screen and (min-width: 769px),print{.is-inline-flex-tablet{display:inline-flex !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-inline-flex-tablet-only{display:inline-flex !important}}@media screen and (max-width: 1055px){.is-inline-flex-touch{display:inline-flex !important}}@media screen and (min-width: 1056px){.is-inline-flex-desktop{display:inline-flex !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-inline-flex-desktop-only{display:inline-flex !important}}@media screen and (min-width: 1216px){.is-inline-flex-widescreen{display:inline-flex !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-inline-flex-widescreen-only{display:inline-flex !important}}@media screen and (min-width: 1408px){.is-inline-flex-fullhd{display:inline-flex !important}}.is-hidden{display:none !important}.is-sr-only{border:none !important;clip:rect(0, 0, 0, 0) !important;height:0.01em !important;overflow:hidden !important;padding:0 !important;position:absolute !important;white-space:nowrap !important;width:0.01em !important}@media screen and (max-width: 768px){.is-hidden-mobile{display:none !important}}@media screen and (min-width: 769px),print{.is-hidden-tablet{display:none !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-hidden-tablet-only{display:none !important}}@media screen and (max-width: 1055px){.is-hidden-touch{display:none !important}}@media screen and (min-width: 1056px){.is-hidden-desktop{display:none !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-hidden-desktop-only{display:none !important}}@media screen and (min-width: 1216px){.is-hidden-widescreen{display:none !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-hidden-widescreen-only{display:none !important}}@media screen and (min-width: 1408px){.is-hidden-fullhd{display:none !important}}.is-invisible{visibility:hidden !important}@media screen and (max-width: 768px){.is-invisible-mobile{visibility:hidden 
!important}}@media screen and (min-width: 769px),print{.is-invisible-tablet{visibility:hidden !important}}@media screen and (min-width: 769px) and (max-width: 1055px){.is-invisible-tablet-only{visibility:hidden !important}}@media screen and (max-width: 1055px){.is-invisible-touch{visibility:hidden !important}}@media screen and (min-width: 1056px){.is-invisible-desktop{visibility:hidden !important}}@media screen and (min-width: 1056px) and (max-width: 1215px){.is-invisible-desktop-only{visibility:hidden !important}}@media screen and (min-width: 1216px){.is-invisible-widescreen{visibility:hidden !important}}@media screen and (min-width: 1216px) and (max-width: 1407px){.is-invisible-widescreen-only{visibility:hidden !important}}@media screen and (min-width: 1408px){.is-invisible-fullhd{visibility:hidden !important}}/*! minireset.css v0.0.6 | MIT License | github.com/jgthms/minireset.css */html,body,p,ol,ul,li,dl,dt,dd,blockquote,figure,fieldset,legend,textarea,pre,iframe,hr,h1,h2,h3,h4,h5,h6{margin:0;padding:0}h1,h2,h3,h4,h5,h6{font-size:100%;font-weight:normal}ul{list-style:none}button,input,select,textarea{margin:0}html{box-sizing:border-box}*,*::before,*::after{box-sizing:inherit}img,video{height:auto;max-width:100%}iframe{border:0}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}td:not([align]),th:not([align]){text-align:inherit}html{background-color:#fff;font-size:16px;-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;min-width:300px;overflow-x:auto;overflow-y:scroll;text-rendering:optimizeLegibility;text-size-adjust:100%}article,aside,figure,footer,header,hgroup,section{display:block}body,button,input,optgroup,select,textarea{font-family:"Lato Medium",-apple-system,BlinkMacSystemFont,"Segoe UI","Helvetica Neue","Helvetica","Arial",sans-serif}code,pre{-moz-osx-font-smoothing:auto;-webkit-font-smoothing:auto;font-family:"JuliaMono","SFMono-Regular","Menlo","Consolas","Liberation Mono","DejaVu Sans Mono",monospace}body{color:#222;font-size:1em;font-weight:400;line-height:1.5}a{color:#2e63b8;cursor:pointer;text-decoration:none}a strong{color:currentColor}a:hover{color:#363636}code{background-color:rgba(0,0,0,0.05);color:#000;font-size:.875em;font-weight:normal;padding:.1em}hr{background-color:#f5f5f5;border:none;display:block;height:2px;margin:1.5rem 0}img{height:auto;max-width:100%}input[type="checkbox"],input[type="radio"]{vertical-align:baseline}small{font-size:.875em}span{font-style:inherit;font-weight:inherit}strong{color:#222;font-weight:700}fieldset{border:none}pre{-webkit-overflow-scrolling:touch;background-color:#f5f5f5;color:#222;font-size:.875em;overflow-x:auto;padding:1.25rem 1.5rem;white-space:pre;word-wrap:normal}pre code{background-color:transparent;color:currentColor;font-size:1em;padding:0}table td,table th{vertical-align:top}table td:not([align]),table th:not([align]){text-align:inherit}table th{color:#222}@keyframes spinAround{from{transform:rotate(0deg)}to{transform:rotate(359deg)}}.box{background-color:#fff;border-radius:6px;box-shadow:#bbb;color:#222;display:block;padding:1.25rem}a.box:hover,a.box:focus{box-shadow:0 0.5em 1em -0.125em rgba(10,10,10,0.1),0 0 0 1px #2e63b8}a.box:active{box-shadow:inset 0 1px 2px rgba(10,10,10,0.2),0 0 0 1px #2e63b8}.button{background-color:#fff;border-color:#dbdbdb;border-width:1px;color:#222;cursor:pointer;justify-content:center;padding-bottom:calc(0.5em - 1px);padding-left:1em;padding-right:1em;padding-top:calc(0.5em - 1px);text-align:center;white-space:nowrap}.button strong{color:inherit}.button 
.icon,.button .icon.is-small,.button #documenter .docs-sidebar form.docs-search>input.icon,#documenter .docs-sidebar .button form.docs-search>input.icon,.button .icon.is-medium,.button .icon.is-large{height:1.5em;width:1.5em}.button .icon:first-child:not(:last-child){margin-left:calc(-0.5em - 1px);margin-right:.25em}.button .icon:last-child:not(:first-child){margin-left:.25em;margin-right:calc(-0.5em - 1px)}.button .icon:first-child:last-child{margin-left:calc(-0.5em - 1px);margin-right:calc(-0.5em - 1px)}.button:hover,.button.is-hovered{border-color:#b5b5b5;color:#363636}.button:focus,.button.is-focused{border-color:#3c5dcd;color:#363636}.button:focus:not(:active),.button.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(46,99,184,0.25)}.button:active,.button.is-active{border-color:#4a4a4a;color:#363636}.button.is-text{background-color:transparent;border-color:transparent;color:#222;text-decoration:underline}.button.is-text:hover,.button.is-text.is-hovered,.button.is-text:focus,.button.is-text.is-focused{background-color:#f5f5f5;color:#222}.button.is-text:active,.button.is-text.is-active{background-color:#e8e8e8;color:#222}.button.is-text[disabled],fieldset[disabled] .button.is-text{background-color:transparent;border-color:transparent;box-shadow:none}.button.is-ghost{background:none;border-color:rgba(0,0,0,0);color:#2e63b8;text-decoration:none}.button.is-ghost:hover,.button.is-ghost.is-hovered{color:#2e63b8;text-decoration:underline}.button.is-white{background-color:#fff;border-color:transparent;color:#0a0a0a}.button.is-white:hover,.button.is-white.is-hovered{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}.button.is-white:focus,.button.is-white.is-focused{border-color:transparent;color:#0a0a0a}.button.is-white:focus:not(:active),.button.is-white.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(255,255,255,0.25)}.button.is-white:active,.button.is-white.is-active{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}.button.is-white[disabled],fieldset[disabled] .button.is-white{background-color:#fff;border-color:#fff;box-shadow:none}.button.is-white.is-inverted{background-color:#0a0a0a;color:#fff}.button.is-white.is-inverted:hover,.button.is-white.is-inverted.is-hovered{background-color:#000}.button.is-white.is-inverted[disabled],fieldset[disabled] .button.is-white.is-inverted{background-color:#0a0a0a;border-color:transparent;box-shadow:none;color:#fff}.button.is-white.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a !important}.button.is-white.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-white.is-outlined:hover,.button.is-white.is-outlined.is-hovered,.button.is-white.is-outlined:focus,.button.is-white.is-outlined.is-focused{background-color:#fff;border-color:#fff;color:#0a0a0a}.button.is-white.is-outlined.is-loading::after{border-color:transparent transparent #fff #fff !important}.button.is-white.is-outlined.is-loading:hover::after,.button.is-white.is-outlined.is-loading.is-hovered::after,.button.is-white.is-outlined.is-loading:focus::after,.button.is-white.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #0a0a0a #0a0a0a !important}.button.is-white.is-outlined[disabled],fieldset[disabled] 
.button.is-white.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}.button.is-white.is-inverted.is-outlined:hover,.button.is-white.is-inverted.is-outlined.is-hovered,.button.is-white.is-inverted.is-outlined:focus,.button.is-white.is-inverted.is-outlined.is-focused{background-color:#0a0a0a;color:#fff}.button.is-white.is-inverted.is-outlined.is-loading:hover::after,.button.is-white.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-white.is-inverted.is-outlined.is-loading:focus::after,.button.is-white.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}.button.is-white.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-white.is-inverted.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}.button.is-black{background-color:#0a0a0a;border-color:transparent;color:#fff}.button.is-black:hover,.button.is-black.is-hovered{background-color:#040404;border-color:transparent;color:#fff}.button.is-black:focus,.button.is-black.is-focused{border-color:transparent;color:#fff}.button.is-black:focus:not(:active),.button.is-black.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(10,10,10,0.25)}.button.is-black:active,.button.is-black.is-active{background-color:#000;border-color:transparent;color:#fff}.button.is-black[disabled],fieldset[disabled] .button.is-black{background-color:#0a0a0a;border-color:#0a0a0a;box-shadow:none}.button.is-black.is-inverted{background-color:#fff;color:#0a0a0a}.button.is-black.is-inverted:hover,.button.is-black.is-inverted.is-hovered{background-color:#f2f2f2}.button.is-black.is-inverted[disabled],fieldset[disabled] .button.is-black.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#0a0a0a}.button.is-black.is-loading::after{border-color:transparent transparent #fff #fff !important}.button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;color:#0a0a0a}.button.is-black.is-outlined:hover,.button.is-black.is-outlined.is-hovered,.button.is-black.is-outlined:focus,.button.is-black.is-outlined.is-focused{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.button.is-black.is-outlined.is-loading::after{border-color:transparent transparent #0a0a0a #0a0a0a !important}.button.is-black.is-outlined.is-loading:hover::after,.button.is-black.is-outlined.is-loading.is-hovered::after,.button.is-black.is-outlined.is-loading:focus::after,.button.is-black.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}.button.is-black.is-outlined[disabled],fieldset[disabled] .button.is-black.is-outlined{background-color:transparent;border-color:#0a0a0a;box-shadow:none;color:#0a0a0a}.button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-black.is-inverted.is-outlined:hover,.button.is-black.is-inverted.is-outlined.is-hovered,.button.is-black.is-inverted.is-outlined:focus,.button.is-black.is-inverted.is-outlined.is-focused{background-color:#fff;color:#0a0a0a}.button.is-black.is-inverted.is-outlined.is-loading:hover::after,.button.is-black.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-black.is-inverted.is-outlined.is-loading:focus::after,.button.is-black.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #0a0a0a #0a0a0a 
!important}.button.is-black.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-black.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-light{background-color:#f5f5f5;border-color:transparent;color:rgba(0,0,0,0.7)}.button.is-light:hover,.button.is-light.is-hovered{background-color:#eee;border-color:transparent;color:rgba(0,0,0,0.7)}.button.is-light:focus,.button.is-light.is-focused{border-color:transparent;color:rgba(0,0,0,0.7)}.button.is-light:focus:not(:active),.button.is-light.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(245,245,245,0.25)}.button.is-light:active,.button.is-light.is-active{background-color:#e8e8e8;border-color:transparent;color:rgba(0,0,0,0.7)}.button.is-light[disabled],fieldset[disabled] .button.is-light{background-color:#f5f5f5;border-color:#f5f5f5;box-shadow:none}.button.is-light.is-inverted{background-color:rgba(0,0,0,0.7);color:#f5f5f5}.button.is-light.is-inverted:hover,.button.is-light.is-inverted.is-hovered{background-color:rgba(0,0,0,0.7)}.button.is-light.is-inverted[disabled],fieldset[disabled] .button.is-light.is-inverted{background-color:rgba(0,0,0,0.7);border-color:transparent;box-shadow:none;color:#f5f5f5}.button.is-light.is-loading::after{border-color:transparent transparent rgba(0,0,0,0.7) rgba(0,0,0,0.7) !important}.button.is-light.is-outlined{background-color:transparent;border-color:#f5f5f5;color:#f5f5f5}.button.is-light.is-outlined:hover,.button.is-light.is-outlined.is-hovered,.button.is-light.is-outlined:focus,.button.is-light.is-outlined.is-focused{background-color:#f5f5f5;border-color:#f5f5f5;color:rgba(0,0,0,0.7)}.button.is-light.is-outlined.is-loading::after{border-color:transparent transparent #f5f5f5 #f5f5f5 !important}.button.is-light.is-outlined.is-loading:hover::after,.button.is-light.is-outlined.is-loading.is-hovered::after,.button.is-light.is-outlined.is-loading:focus::after,.button.is-light.is-outlined.is-loading.is-focused::after{border-color:transparent transparent rgba(0,0,0,0.7) rgba(0,0,0,0.7) !important}.button.is-light.is-outlined[disabled],fieldset[disabled] .button.is-light.is-outlined{background-color:transparent;border-color:#f5f5f5;box-shadow:none;color:#f5f5f5}.button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,0.7);color:rgba(0,0,0,0.7)}.button.is-light.is-inverted.is-outlined:hover,.button.is-light.is-inverted.is-outlined.is-hovered,.button.is-light.is-inverted.is-outlined:focus,.button.is-light.is-inverted.is-outlined.is-focused{background-color:rgba(0,0,0,0.7);color:#f5f5f5}.button.is-light.is-inverted.is-outlined.is-loading:hover::after,.button.is-light.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-light.is-inverted.is-outlined.is-loading:focus::after,.button.is-light.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #f5f5f5 #f5f5f5 !important}.button.is-light.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-light.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,0.7);box-shadow:none;color:rgba(0,0,0,0.7)}.button.is-dark,.content kbd.button{background-color:#363636;border-color:transparent;color:#fff}.button.is-dark:hover,.content kbd.button:hover,.button.is-dark.is-hovered,.content kbd.button.is-hovered{background-color:#2f2f2f;border-color:transparent;color:#fff}.button.is-dark:focus,.content kbd.button:focus,.button.is-dark.is-focused,.content 
kbd.button.is-focused{border-color:transparent;color:#fff}.button.is-dark:focus:not(:active),.content kbd.button:focus:not(:active),.button.is-dark.is-focused:not(:active),.content kbd.button.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(54,54,54,0.25)}.button.is-dark:active,.content kbd.button:active,.button.is-dark.is-active,.content kbd.button.is-active{background-color:#292929;border-color:transparent;color:#fff}.button.is-dark[disabled],.content kbd.button[disabled],fieldset[disabled] .button.is-dark,fieldset[disabled] .content kbd.button,.content fieldset[disabled] kbd.button{background-color:#363636;border-color:#363636;box-shadow:none}.button.is-dark.is-inverted,.content kbd.button.is-inverted{background-color:#fff;color:#363636}.button.is-dark.is-inverted:hover,.content kbd.button.is-inverted:hover,.button.is-dark.is-inverted.is-hovered,.content kbd.button.is-inverted.is-hovered{background-color:#f2f2f2}.button.is-dark.is-inverted[disabled],.content kbd.button.is-inverted[disabled],fieldset[disabled] .button.is-dark.is-inverted,fieldset[disabled] .content kbd.button.is-inverted,.content fieldset[disabled] kbd.button.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#363636}.button.is-dark.is-loading::after,.content kbd.button.is-loading::after{border-color:transparent transparent #fff #fff !important}.button.is-dark.is-outlined,.content kbd.button.is-outlined{background-color:transparent;border-color:#363636;color:#363636}.button.is-dark.is-outlined:hover,.content kbd.button.is-outlined:hover,.button.is-dark.is-outlined.is-hovered,.content kbd.button.is-outlined.is-hovered,.button.is-dark.is-outlined:focus,.content kbd.button.is-outlined:focus,.button.is-dark.is-outlined.is-focused,.content kbd.button.is-outlined.is-focused{background-color:#363636;border-color:#363636;color:#fff}.button.is-dark.is-outlined.is-loading::after,.content kbd.button.is-outlined.is-loading::after{border-color:transparent transparent #363636 #363636 !important}.button.is-dark.is-outlined.is-loading:hover::after,.content kbd.button.is-outlined.is-loading:hover::after,.button.is-dark.is-outlined.is-loading.is-hovered::after,.content kbd.button.is-outlined.is-loading.is-hovered::after,.button.is-dark.is-outlined.is-loading:focus::after,.content kbd.button.is-outlined.is-loading:focus::after,.button.is-dark.is-outlined.is-loading.is-focused::after,.content kbd.button.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}.button.is-dark.is-outlined[disabled],.content kbd.button.is-outlined[disabled],fieldset[disabled] .button.is-dark.is-outlined,fieldset[disabled] .content kbd.button.is-outlined,.content fieldset[disabled] kbd.button.is-outlined{background-color:transparent;border-color:#363636;box-shadow:none;color:#363636}.button.is-dark.is-inverted.is-outlined,.content kbd.button.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-dark.is-inverted.is-outlined:hover,.content kbd.button.is-inverted.is-outlined:hover,.button.is-dark.is-inverted.is-outlined.is-hovered,.content kbd.button.is-inverted.is-outlined.is-hovered,.button.is-dark.is-inverted.is-outlined:focus,.content kbd.button.is-inverted.is-outlined:focus,.button.is-dark.is-inverted.is-outlined.is-focused,.content kbd.button.is-inverted.is-outlined.is-focused{background-color:#fff;color:#363636}.button.is-dark.is-inverted.is-outlined.is-loading:hover::after,.content 
kbd.button.is-inverted.is-outlined.is-loading:hover::after,.button.is-dark.is-inverted.is-outlined.is-loading.is-hovered::after,.content kbd.button.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-dark.is-inverted.is-outlined.is-loading:focus::after,.content kbd.button.is-inverted.is-outlined.is-loading:focus::after,.button.is-dark.is-inverted.is-outlined.is-loading.is-focused::after,.content kbd.button.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #363636 #363636 !important}.button.is-dark.is-inverted.is-outlined[disabled],.content kbd.button.is-inverted.is-outlined[disabled],fieldset[disabled] .button.is-dark.is-inverted.is-outlined,fieldset[disabled] .content kbd.button.is-inverted.is-outlined,.content fieldset[disabled] kbd.button.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-primary,.docstring>section>a.button.docs-sourcelink{background-color:#4eb5de;border-color:transparent;color:#fff}.button.is-primary:hover,.docstring>section>a.button.docs-sourcelink:hover,.button.is-primary.is-hovered,.docstring>section>a.button.is-hovered.docs-sourcelink{background-color:#43b1dc;border-color:transparent;color:#fff}.button.is-primary:focus,.docstring>section>a.button.docs-sourcelink:focus,.button.is-primary.is-focused,.docstring>section>a.button.is-focused.docs-sourcelink{border-color:transparent;color:#fff}.button.is-primary:focus:not(:active),.docstring>section>a.button.docs-sourcelink:focus:not(:active),.button.is-primary.is-focused:not(:active),.docstring>section>a.button.is-focused.docs-sourcelink:not(:active){box-shadow:0 0 0 0.125em rgba(78,181,222,0.25)}.button.is-primary:active,.docstring>section>a.button.docs-sourcelink:active,.button.is-primary.is-active,.docstring>section>a.button.is-active.docs-sourcelink{background-color:#39acda;border-color:transparent;color:#fff}.button.is-primary[disabled],.docstring>section>a.button.docs-sourcelink[disabled],fieldset[disabled] .button.is-primary,fieldset[disabled] .docstring>section>a.button.docs-sourcelink{background-color:#4eb5de;border-color:#4eb5de;box-shadow:none}.button.is-primary.is-inverted,.docstring>section>a.button.is-inverted.docs-sourcelink{background-color:#fff;color:#4eb5de}.button.is-primary.is-inverted:hover,.docstring>section>a.button.is-inverted.docs-sourcelink:hover,.button.is-primary.is-inverted.is-hovered,.docstring>section>a.button.is-inverted.is-hovered.docs-sourcelink{background-color:#f2f2f2}.button.is-primary.is-inverted[disabled],.docstring>section>a.button.is-inverted.docs-sourcelink[disabled],fieldset[disabled] .button.is-primary.is-inverted,fieldset[disabled] .docstring>section>a.button.is-inverted.docs-sourcelink{background-color:#fff;border-color:transparent;box-shadow:none;color:#4eb5de}.button.is-primary.is-loading::after,.docstring>section>a.button.is-loading.docs-sourcelink::after{border-color:transparent transparent #fff #fff 
!important}.button.is-primary.is-outlined,.docstring>section>a.button.is-outlined.docs-sourcelink{background-color:transparent;border-color:#4eb5de;color:#4eb5de}.button.is-primary.is-outlined:hover,.docstring>section>a.button.is-outlined.docs-sourcelink:hover,.button.is-primary.is-outlined.is-hovered,.docstring>section>a.button.is-outlined.is-hovered.docs-sourcelink,.button.is-primary.is-outlined:focus,.docstring>section>a.button.is-outlined.docs-sourcelink:focus,.button.is-primary.is-outlined.is-focused,.docstring>section>a.button.is-outlined.is-focused.docs-sourcelink{background-color:#4eb5de;border-color:#4eb5de;color:#fff}.button.is-primary.is-outlined.is-loading::after,.docstring>section>a.button.is-outlined.is-loading.docs-sourcelink::after{border-color:transparent transparent #4eb5de #4eb5de !important}.button.is-primary.is-outlined.is-loading:hover::after,.docstring>section>a.button.is-outlined.is-loading.docs-sourcelink:hover::after,.button.is-primary.is-outlined.is-loading.is-hovered::after,.docstring>section>a.button.is-outlined.is-loading.is-hovered.docs-sourcelink::after,.button.is-primary.is-outlined.is-loading:focus::after,.docstring>section>a.button.is-outlined.is-loading.docs-sourcelink:focus::after,.button.is-primary.is-outlined.is-loading.is-focused::after,.docstring>section>a.button.is-outlined.is-loading.is-focused.docs-sourcelink::after{border-color:transparent transparent #fff #fff !important}.button.is-primary.is-outlined[disabled],.docstring>section>a.button.is-outlined.docs-sourcelink[disabled],fieldset[disabled] .button.is-primary.is-outlined,fieldset[disabled] .docstring>section>a.button.is-outlined.docs-sourcelink{background-color:transparent;border-color:#4eb5de;box-shadow:none;color:#4eb5de}.button.is-primary.is-inverted.is-outlined,.docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink{background-color:transparent;border-color:#fff;color:#fff}.button.is-primary.is-inverted.is-outlined:hover,.docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink:hover,.button.is-primary.is-inverted.is-outlined.is-hovered,.docstring>section>a.button.is-inverted.is-outlined.is-hovered.docs-sourcelink,.button.is-primary.is-inverted.is-outlined:focus,.docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink:focus,.button.is-primary.is-inverted.is-outlined.is-focused,.docstring>section>a.button.is-inverted.is-outlined.is-focused.docs-sourcelink{background-color:#fff;color:#4eb5de}.button.is-primary.is-inverted.is-outlined.is-loading:hover::after,.docstring>section>a.button.is-inverted.is-outlined.is-loading.docs-sourcelink:hover::after,.button.is-primary.is-inverted.is-outlined.is-loading.is-hovered::after,.docstring>section>a.button.is-inverted.is-outlined.is-loading.is-hovered.docs-sourcelink::after,.button.is-primary.is-inverted.is-outlined.is-loading:focus::after,.docstring>section>a.button.is-inverted.is-outlined.is-loading.docs-sourcelink:focus::after,.button.is-primary.is-inverted.is-outlined.is-loading.is-focused::after,.docstring>section>a.button.is-inverted.is-outlined.is-loading.is-focused.docs-sourcelink::after{border-color:transparent transparent #4eb5de #4eb5de !important}.button.is-primary.is-inverted.is-outlined[disabled],.docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink[disabled],fieldset[disabled] .button.is-primary.is-inverted.is-outlined,fieldset[disabled] 
.docstring>section>a.button.is-inverted.is-outlined.docs-sourcelink{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-primary.is-light,.docstring>section>a.button.is-light.docs-sourcelink{background-color:#eef8fc;color:#1a6d8e}.button.is-primary.is-light:hover,.docstring>section>a.button.is-light.docs-sourcelink:hover,.button.is-primary.is-light.is-hovered,.docstring>section>a.button.is-light.is-hovered.docs-sourcelink{background-color:#e3f3fa;border-color:transparent;color:#1a6d8e}.button.is-primary.is-light:active,.docstring>section>a.button.is-light.docs-sourcelink:active,.button.is-primary.is-light.is-active,.docstring>section>a.button.is-light.is-active.docs-sourcelink{background-color:#d8eff8;border-color:transparent;color:#1a6d8e}.button.is-link{background-color:#2e63b8;border-color:transparent;color:#fff}.button.is-link:hover,.button.is-link.is-hovered{background-color:#2b5eae;border-color:transparent;color:#fff}.button.is-link:focus,.button.is-link.is-focused{border-color:transparent;color:#fff}.button.is-link:focus:not(:active),.button.is-link.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(46,99,184,0.25)}.button.is-link:active,.button.is-link.is-active{background-color:#2958a4;border-color:transparent;color:#fff}.button.is-link[disabled],fieldset[disabled] .button.is-link{background-color:#2e63b8;border-color:#2e63b8;box-shadow:none}.button.is-link.is-inverted{background-color:#fff;color:#2e63b8}.button.is-link.is-inverted:hover,.button.is-link.is-inverted.is-hovered{background-color:#f2f2f2}.button.is-link.is-inverted[disabled],fieldset[disabled] .button.is-link.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#2e63b8}.button.is-link.is-loading::after{border-color:transparent transparent #fff #fff !important}.button.is-link.is-outlined{background-color:transparent;border-color:#2e63b8;color:#2e63b8}.button.is-link.is-outlined:hover,.button.is-link.is-outlined.is-hovered,.button.is-link.is-outlined:focus,.button.is-link.is-outlined.is-focused{background-color:#2e63b8;border-color:#2e63b8;color:#fff}.button.is-link.is-outlined.is-loading::after{border-color:transparent transparent #2e63b8 #2e63b8 !important}.button.is-link.is-outlined.is-loading:hover::after,.button.is-link.is-outlined.is-loading.is-hovered::after,.button.is-link.is-outlined.is-loading:focus::after,.button.is-link.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}.button.is-link.is-outlined[disabled],fieldset[disabled] .button.is-link.is-outlined{background-color:transparent;border-color:#2e63b8;box-shadow:none;color:#2e63b8}.button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-link.is-inverted.is-outlined:hover,.button.is-link.is-inverted.is-outlined.is-hovered,.button.is-link.is-inverted.is-outlined:focus,.button.is-link.is-inverted.is-outlined.is-focused{background-color:#fff;color:#2e63b8}.button.is-link.is-inverted.is-outlined.is-loading:hover::after,.button.is-link.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-link.is-inverted.is-outlined.is-loading:focus::after,.button.is-link.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #2e63b8 #2e63b8 !important}.button.is-link.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-link.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-link.is-light{background-color:#eff3fb;color:#3169c4}.button.is-link.is-light:hover,.button.is-link.is-light.is-hovered{background-color:#e4ecf8;border-color:transparent;color:#3169c4}.button.is-link.is-light:active,.button.is-link.is-light.is-active{background-color:#dae5f6;border-color:transparent;color:#3169c4}.button.is-info{background-color:#209cee;border-color:transparent;color:#fff}.button.is-info:hover,.button.is-info.is-hovered{background-color:#1497ed;border-color:transparent;color:#fff}.button.is-info:focus,.button.is-info.is-focused{border-color:transparent;color:#fff}.button.is-info:focus:not(:active),.button.is-info.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(32,156,238,0.25)}.button.is-info:active,.button.is-info.is-active{background-color:#1190e3;border-color:transparent;color:#fff}.button.is-info[disabled],fieldset[disabled] .button.is-info{background-color:#209cee;border-color:#209cee;box-shadow:none}.button.is-info.is-inverted{background-color:#fff;color:#209cee}.button.is-info.is-inverted:hover,.button.is-info.is-inverted.is-hovered{background-color:#f2f2f2}.button.is-info.is-inverted[disabled],fieldset[disabled] .button.is-info.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#209cee}.button.is-info.is-loading::after{border-color:transparent transparent #fff #fff !important}.button.is-info.is-outlined{background-color:transparent;border-color:#209cee;color:#209cee}.button.is-info.is-outlined:hover,.button.is-info.is-outlined.is-hovered,.button.is-info.is-outlined:focus,.button.is-info.is-outlined.is-focused{background-color:#209cee;border-color:#209cee;color:#fff}.button.is-info.is-outlined.is-loading::after{border-color:transparent transparent #209cee #209cee !important}.button.is-info.is-outlined.is-loading:hover::after,.button.is-info.is-outlined.is-loading.is-hovered::after,.button.is-info.is-outlined.is-loading:focus::after,.button.is-info.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}.button.is-info.is-outlined[disabled],fieldset[disabled] .button.is-info.is-outlined{background-color:transparent;border-color:#209cee;box-shadow:none;color:#209cee}.button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-info.is-inverted.is-outlined:hover,.button.is-info.is-inverted.is-outlined.is-hovered,.button.is-info.is-inverted.is-outlined:focus,.button.is-info.is-inverted.is-outlined.is-focused{background-color:#fff;color:#209cee}.button.is-info.is-inverted.is-outlined.is-loading:hover::after,.button.is-info.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-info.is-inverted.is-outlined.is-loading:focus::after,.button.is-info.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #209cee #209cee !important}.button.is-info.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-info.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-info.is-light{background-color:#ecf7fe;color:#0e72b4}.button.is-info.is-light:hover,.button.is-info.is-light.is-hovered{background-color:#e0f1fd;border-color:transparent;color:#0e72b4}.button.is-info.is-light:active,.button.is-info.is-light.is-active{background-color:#d4ecfc;border-color:transparent;color:#0e72b4}.button.is-success{background-color:#22c35b;border-color:transparent;color:#fff}.button.is-success:hover,.button.is-success.is-hovered{background-color:#20b856;border-color:transparent;color:#fff}.button.is-success:focus,.button.is-success.is-focused{border-color:transparent;color:#fff}.button.is-success:focus:not(:active),.button.is-success.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(34,195,91,0.25)}.button.is-success:active,.button.is-success.is-active{background-color:#1ead51;border-color:transparent;color:#fff}.button.is-success[disabled],fieldset[disabled] .button.is-success{background-color:#22c35b;border-color:#22c35b;box-shadow:none}.button.is-success.is-inverted{background-color:#fff;color:#22c35b}.button.is-success.is-inverted:hover,.button.is-success.is-inverted.is-hovered{background-color:#f2f2f2}.button.is-success.is-inverted[disabled],fieldset[disabled] .button.is-success.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#22c35b}.button.is-success.is-loading::after{border-color:transparent transparent #fff #fff !important}.button.is-success.is-outlined{background-color:transparent;border-color:#22c35b;color:#22c35b}.button.is-success.is-outlined:hover,.button.is-success.is-outlined.is-hovered,.button.is-success.is-outlined:focus,.button.is-success.is-outlined.is-focused{background-color:#22c35b;border-color:#22c35b;color:#fff}.button.is-success.is-outlined.is-loading::after{border-color:transparent transparent #22c35b #22c35b !important}.button.is-success.is-outlined.is-loading:hover::after,.button.is-success.is-outlined.is-loading.is-hovered::after,.button.is-success.is-outlined.is-loading:focus::after,.button.is-success.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}.button.is-success.is-outlined[disabled],fieldset[disabled] .button.is-success.is-outlined{background-color:transparent;border-color:#22c35b;box-shadow:none;color:#22c35b}.button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-success.is-inverted.is-outlined:hover,.button.is-success.is-inverted.is-outlined.is-hovered,.button.is-success.is-inverted.is-outlined:focus,.button.is-success.is-inverted.is-outlined.is-focused{background-color:#fff;color:#22c35b}.button.is-success.is-inverted.is-outlined.is-loading:hover::after,.button.is-success.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-success.is-inverted.is-outlined.is-loading:focus::after,.button.is-success.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #22c35b #22c35b !important}.button.is-success.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-success.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-success.is-light{background-color:#eefcf3;color:#198f43}.button.is-success.is-light:hover,.button.is-success.is-light.is-hovered{background-color:#e3faeb;border-color:transparent;color:#198f43}.button.is-success.is-light:active,.button.is-success.is-light.is-active{background-color:#d8f8e3;border-color:transparent;color:#198f43}.button.is-warning{background-color:#ffdd57;border-color:transparent;color:rgba(0,0,0,0.7)}.button.is-warning:hover,.button.is-warning.is-hovered{background-color:#ffda4a;border-color:transparent;color:rgba(0,0,0,0.7)}.button.is-warning:focus,.button.is-warning.is-focused{border-color:transparent;color:rgba(0,0,0,0.7)}.button.is-warning:focus:not(:active),.button.is-warning.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(255,221,87,0.25)}.button.is-warning:active,.button.is-warning.is-active{background-color:#ffd83e;border-color:transparent;color:rgba(0,0,0,0.7)}.button.is-warning[disabled],fieldset[disabled] .button.is-warning{background-color:#ffdd57;border-color:#ffdd57;box-shadow:none}.button.is-warning.is-inverted{background-color:rgba(0,0,0,0.7);color:#ffdd57}.button.is-warning.is-inverted:hover,.button.is-warning.is-inverted.is-hovered{background-color:rgba(0,0,0,0.7)}.button.is-warning.is-inverted[disabled],fieldset[disabled] .button.is-warning.is-inverted{background-color:rgba(0,0,0,0.7);border-color:transparent;box-shadow:none;color:#ffdd57}.button.is-warning.is-loading::after{border-color:transparent transparent rgba(0,0,0,0.7) rgba(0,0,0,0.7) !important}.button.is-warning.is-outlined{background-color:transparent;border-color:#ffdd57;color:#ffdd57}.button.is-warning.is-outlined:hover,.button.is-warning.is-outlined.is-hovered,.button.is-warning.is-outlined:focus,.button.is-warning.is-outlined.is-focused{background-color:#ffdd57;border-color:#ffdd57;color:rgba(0,0,0,0.7)}.button.is-warning.is-outlined.is-loading::after{border-color:transparent transparent #ffdd57 #ffdd57 !important}.button.is-warning.is-outlined.is-loading:hover::after,.button.is-warning.is-outlined.is-loading.is-hovered::after,.button.is-warning.is-outlined.is-loading:focus::after,.button.is-warning.is-outlined.is-loading.is-focused::after{border-color:transparent transparent rgba(0,0,0,0.7) rgba(0,0,0,0.7) !important}.button.is-warning.is-outlined[disabled],fieldset[disabled] .button.is-warning.is-outlined{background-color:transparent;border-color:#ffdd57;box-shadow:none;color:#ffdd57}.button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,0.7);color:rgba(0,0,0,0.7)}.button.is-warning.is-inverted.is-outlined:hover,.button.is-warning.is-inverted.is-outlined.is-hovered,.button.is-warning.is-inverted.is-outlined:focus,.button.is-warning.is-inverted.is-outlined.is-focused{background-color:rgba(0,0,0,0.7);color:#ffdd57}.button.is-warning.is-inverted.is-outlined.is-loading:hover::after,.button.is-warning.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-warning.is-inverted.is-outlined.is-loading:focus::after,.button.is-warning.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #ffdd57 #ffdd57 !important}.button.is-warning.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-warning.is-inverted.is-outlined{background-color:transparent;border-color:rgba(0,0,0,0.7);box-shadow:none;color:rgba(0,0,0,0.7)}.button.is-warning.is-light{background-color:#fffbeb;color:#947600}.button.is-warning.is-light:hover,.button.is-warning.is-light.is-hovered{background-color:#fff8de;border-color:transparent;color:#947600}.button.is-warning.is-light:active,.button.is-warning.is-light.is-active{background-color:#fff6d1;border-color:transparent;color:#947600}.button.is-danger{background-color:#da0b00;border-color:transparent;color:#fff}.button.is-danger:hover,.button.is-danger.is-hovered{background-color:#cd0a00;border-color:transparent;color:#fff}.button.is-danger:focus,.button.is-danger.is-focused{border-color:transparent;color:#fff}.button.is-danger:focus:not(:active),.button.is-danger.is-focused:not(:active){box-shadow:0 0 0 0.125em rgba(218,11,0,0.25)}.button.is-danger:active,.button.is-danger.is-active{background-color:#c10a00;border-color:transparent;color:#fff}.button.is-danger[disabled],fieldset[disabled] .button.is-danger{background-color:#da0b00;border-color:#da0b00;box-shadow:none}.button.is-danger.is-inverted{background-color:#fff;color:#da0b00}.button.is-danger.is-inverted:hover,.button.is-danger.is-inverted.is-hovered{background-color:#f2f2f2}.button.is-danger.is-inverted[disabled],fieldset[disabled] .button.is-danger.is-inverted{background-color:#fff;border-color:transparent;box-shadow:none;color:#da0b00}.button.is-danger.is-loading::after{border-color:transparent transparent #fff #fff !important}.button.is-danger.is-outlined{background-color:transparent;border-color:#da0b00;color:#da0b00}.button.is-danger.is-outlined:hover,.button.is-danger.is-outlined.is-hovered,.button.is-danger.is-outlined:focus,.button.is-danger.is-outlined.is-focused{background-color:#da0b00;border-color:#da0b00;color:#fff}.button.is-danger.is-outlined.is-loading::after{border-color:transparent transparent #da0b00 #da0b00 !important}.button.is-danger.is-outlined.is-loading:hover::after,.button.is-danger.is-outlined.is-loading.is-hovered::after,.button.is-danger.is-outlined.is-loading:focus::after,.button.is-danger.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #fff #fff !important}.button.is-danger.is-outlined[disabled],fieldset[disabled] .button.is-danger.is-outlined{background-color:transparent;border-color:#da0b00;box-shadow:none;color:#da0b00}.button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;color:#fff}.button.is-danger.is-inverted.is-outlined:hover,.button.is-danger.is-inverted.is-outlined.is-hovered,.button.is-danger.is-inverted.is-outlined:focus,.button.is-danger.is-inverted.is-outlined.is-focused{background-color:#fff;color:#da0b00}.button.is-danger.is-inverted.is-outlined.is-loading:hover::after,.button.is-danger.is-inverted.is-outlined.is-loading.is-hovered::after,.button.is-danger.is-inverted.is-outlined.is-loading:focus::after,.button.is-danger.is-inverted.is-outlined.is-loading.is-focused::after{border-color:transparent transparent #da0b00 #da0b00 !important}.button.is-danger.is-inverted.is-outlined[disabled],fieldset[disabled] 
.button.is-danger.is-inverted.is-outlined{background-color:transparent;border-color:#fff;box-shadow:none;color:#fff}.button.is-danger.is-light{background-color:#ffeceb;color:#f50c00}.button.is-danger.is-light:hover,.button.is-danger.is-light.is-hovered{background-color:#ffe0de;border-color:transparent;color:#f50c00}.button.is-danger.is-light:active,.button.is-danger.is-light.is-active{background-color:#ffd3d1;border-color:transparent;color:#f50c00}.button.is-small,#documenter .docs-sidebar form.docs-search>input.button{font-size:.75rem}.button.is-small:not(.is-rounded),#documenter .docs-sidebar form.docs-search>input.button:not(.is-rounded){border-radius:2px}.button.is-normal{font-size:1rem}.button.is-medium{font-size:1.25rem}.button.is-large{font-size:1.5rem}.button[disabled],fieldset[disabled] .button{background-color:#fff;border-color:#dbdbdb;box-shadow:none;opacity:.5}.button.is-fullwidth{display:flex;width:100%}.button.is-loading{color:transparent !important;pointer-events:none}.button.is-loading::after{position:absolute;left:calc(50% - (1em * 0.5));top:calc(50% - (1em * 0.5));position:absolute !important}.button.is-static{background-color:#f5f5f5;border-color:#dbdbdb;color:#6b6b6b;box-shadow:none;pointer-events:none}.button.is-rounded,#documenter .docs-sidebar form.docs-search>input.button{border-radius:9999px;padding-left:calc(1em + 0.25em);padding-right:calc(1em + 0.25em)}.buttons{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}.buttons .button{margin-bottom:0.5rem}.buttons .button:not(:last-child):not(.is-fullwidth){margin-right:.5rem}.buttons:last-child{margin-bottom:-0.5rem}.buttons:not(:last-child){margin-bottom:1rem}.buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large){font-size:.75rem}.buttons.are-small .button:not(.is-normal):not(.is-medium):not(.is-large):not(.is-rounded){border-radius:2px}.buttons.are-medium .button:not(.is-small):not(.is-normal):not(.is-large){font-size:1.25rem}.buttons.are-large .button:not(.is-small):not(.is-normal):not(.is-medium){font-size:1.5rem}.buttons.has-addons .button:not(:first-child){border-bottom-left-radius:0;border-top-left-radius:0}.buttons.has-addons .button:not(:last-child){border-bottom-right-radius:0;border-top-right-radius:0;margin-right:-1px}.buttons.has-addons .button:last-child{margin-right:0}.buttons.has-addons .button:hover,.buttons.has-addons .button.is-hovered{z-index:2}.buttons.has-addons .button:focus,.buttons.has-addons .button.is-focused,.buttons.has-addons .button:active,.buttons.has-addons .button.is-active,.buttons.has-addons .button.is-selected{z-index:3}.buttons.has-addons .button:focus:hover,.buttons.has-addons .button.is-focused:hover,.buttons.has-addons .button:active:hover,.buttons.has-addons .button.is-active:hover,.buttons.has-addons .button.is-selected:hover{z-index:4}.buttons.has-addons .button.is-expanded{flex-grow:1;flex-shrink:1}.buttons.is-centered{justify-content:center}.buttons.is-centered:not(.has-addons) .button:not(.is-fullwidth){margin-left:0.25rem;margin-right:0.25rem}.buttons.is-right{justify-content:flex-end}.buttons.is-right:not(.has-addons) .button:not(.is-fullwidth){margin-left:0.25rem;margin-right:0.25rem}@media screen and (max-width: 768px){.button.is-responsive.is-small,#documenter .docs-sidebar form.docs-search>input.is-responsive{font-size:.5625rem}.button.is-responsive,.button.is-responsive.is-normal{font-size:.65625rem}.button.is-responsive.is-medium{font-size:.75rem}.button.is-responsive.is-large{font-size:1rem}}@media screen and (min-width: 
769px) and (max-width: 1055px){.button.is-responsive.is-small,#documenter .docs-sidebar form.docs-search>input.is-responsive{font-size:.65625rem}.button.is-responsive,.button.is-responsive.is-normal{font-size:.75rem}.button.is-responsive.is-medium{font-size:1rem}.button.is-responsive.is-large{font-size:1.25rem}}.container{flex-grow:1;margin:0 auto;position:relative;width:auto}.container.is-fluid{max-width:none !important;padding-left:32px;padding-right:32px;width:100%}@media screen and (min-width: 1056px){.container{max-width:992px}}@media screen and (max-width: 1215px){.container.is-widescreen:not(.is-max-desktop){max-width:1152px}}@media screen and (max-width: 1407px){.container.is-fullhd:not(.is-max-desktop):not(.is-max-widescreen){max-width:1344px}}@media screen and (min-width: 1216px){.container:not(.is-max-desktop){max-width:1152px}}@media screen and (min-width: 1408px){.container:not(.is-max-desktop):not(.is-max-widescreen){max-width:1344px}}.content li+li{margin-top:0.25em}.content p:not(:last-child),.content dl:not(:last-child),.content ol:not(:last-child),.content ul:not(:last-child),.content blockquote:not(:last-child),.content pre:not(:last-child),.content table:not(:last-child){margin-bottom:1em}.content h1,.content h2,.content h3,.content h4,.content h5,.content h6{color:#222;font-weight:600;line-height:1.125}.content h1{font-size:2em;margin-bottom:0.5em}.content h1:not(:first-child){margin-top:1em}.content h2{font-size:1.75em;margin-bottom:0.5714em}.content h2:not(:first-child){margin-top:1.1428em}.content h3{font-size:1.5em;margin-bottom:0.6666em}.content h3:not(:first-child){margin-top:1.3333em}.content h4{font-size:1.25em;margin-bottom:0.8em}.content h5{font-size:1.125em;margin-bottom:0.8888em}.content h6{font-size:1em;margin-bottom:1em}.content blockquote{background-color:#f5f5f5;border-left:5px solid #dbdbdb;padding:1.25em 1.5em}.content ol{list-style-position:outside;margin-left:2em;margin-top:1em}.content ol:not([type]){list-style-type:decimal}.content ol.is-lower-alpha:not([type]){list-style-type:lower-alpha}.content ol.is-lower-roman:not([type]){list-style-type:lower-roman}.content ol.is-upper-alpha:not([type]){list-style-type:upper-alpha}.content ol.is-upper-roman:not([type]){list-style-type:upper-roman}.content ul{list-style:disc outside;margin-left:2em;margin-top:1em}.content ul ul{list-style-type:circle;margin-top:0.5em}.content ul ul ul{list-style-type:square}.content dd{margin-left:2em}.content figure{margin-left:2em;margin-right:2em;text-align:center}.content figure:not(:first-child){margin-top:2em}.content figure:not(:last-child){margin-bottom:2em}.content figure img{display:inline-block}.content figure figcaption{font-style:italic}.content pre{-webkit-overflow-scrolling:touch;overflow-x:auto;padding:0;white-space:pre;word-wrap:normal}.content sup,.content sub{font-size:75%}.content table{width:100%}.content table td,.content table th{border:1px solid #dbdbdb;border-width:0 0 1px;padding:0.5em 0.75em;vertical-align:top}.content table th{color:#222}.content table th:not([align]){text-align:inherit}.content table thead td,.content table thead th{border-width:0 0 2px;color:#222}.content table tfoot td,.content table tfoot th{border-width:2px 0 0;color:#222}.content table tbody tr:last-child td,.content table tbody tr:last-child th{border-bottom-width:0}.content .tabs li+li{margin-top:0}.content.is-small,#documenter .docs-sidebar 
form.docs-search>input.content{font-size:.75rem}.content.is-normal{font-size:1rem}.content.is-medium{font-size:1.25rem}.content.is-large{font-size:1.5rem}.icon{align-items:center;display:inline-flex;justify-content:center;height:1.5rem;width:1.5rem}.icon.is-small,#documenter .docs-sidebar form.docs-search>input.icon{height:1rem;width:1rem}.icon.is-medium{height:2rem;width:2rem}.icon.is-large{height:3rem;width:3rem}.icon-text{align-items:flex-start;color:inherit;display:inline-flex;flex-wrap:wrap;line-height:1.5rem;vertical-align:top}.icon-text .icon{flex-grow:0;flex-shrink:0}.icon-text .icon:not(:last-child){margin-right:.25em}.icon-text .icon:not(:first-child){margin-left:.25em}div.icon-text{display:flex}.image,#documenter .docs-sidebar .docs-logo>img{display:block;position:relative}.image img,#documenter .docs-sidebar .docs-logo>img img{display:block;height:auto;width:100%}.image img.is-rounded,#documenter .docs-sidebar .docs-logo>img img.is-rounded{border-radius:9999px}.image.is-fullwidth,#documenter .docs-sidebar .docs-logo>img.is-fullwidth{width:100%}.image.is-square img,#documenter .docs-sidebar .docs-logo>img.is-square img,.image.is-square .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-square .has-ratio,.image.is-1by1 img,#documenter .docs-sidebar .docs-logo>img.is-1by1 img,.image.is-1by1 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-1by1 .has-ratio,.image.is-5by4 img,#documenter .docs-sidebar .docs-logo>img.is-5by4 img,.image.is-5by4 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-5by4 .has-ratio,.image.is-4by3 img,#documenter .docs-sidebar .docs-logo>img.is-4by3 img,.image.is-4by3 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-4by3 .has-ratio,.image.is-3by2 img,#documenter .docs-sidebar .docs-logo>img.is-3by2 img,.image.is-3by2 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-3by2 .has-ratio,.image.is-5by3 img,#documenter .docs-sidebar .docs-logo>img.is-5by3 img,.image.is-5by3 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-5by3 .has-ratio,.image.is-16by9 img,#documenter .docs-sidebar .docs-logo>img.is-16by9 img,.image.is-16by9 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-16by9 .has-ratio,.image.is-2by1 img,#documenter .docs-sidebar .docs-logo>img.is-2by1 img,.image.is-2by1 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-2by1 .has-ratio,.image.is-3by1 img,#documenter .docs-sidebar .docs-logo>img.is-3by1 img,.image.is-3by1 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-3by1 .has-ratio,.image.is-4by5 img,#documenter .docs-sidebar .docs-logo>img.is-4by5 img,.image.is-4by5 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-4by5 .has-ratio,.image.is-3by4 img,#documenter .docs-sidebar .docs-logo>img.is-3by4 img,.image.is-3by4 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-3by4 .has-ratio,.image.is-2by3 img,#documenter .docs-sidebar .docs-logo>img.is-2by3 img,.image.is-2by3 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-2by3 .has-ratio,.image.is-3by5 img,#documenter .docs-sidebar .docs-logo>img.is-3by5 img,.image.is-3by5 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-3by5 .has-ratio,.image.is-9by16 img,#documenter .docs-sidebar .docs-logo>img.is-9by16 img,.image.is-9by16 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-9by16 .has-ratio,.image.is-1by2 img,#documenter .docs-sidebar .docs-logo>img.is-1by2 img,.image.is-1by2 .has-ratio,#documenter .docs-sidebar .docs-logo>img.is-1by2 .has-ratio,.image.is-1by3 img,#documenter .docs-sidebar .docs-logo>img.is-1by3 img,.image.is-1by3 
.has-ratio,#documenter .docs-sidebar .docs-logo>img.is-1by3 .has-ratio{height:100%;width:100%}.image.is-square,#documenter .docs-sidebar .docs-logo>img.is-square,.image.is-1by1,#documenter .docs-sidebar .docs-logo>img.is-1by1{padding-top:100%}.image.is-5by4,#documenter .docs-sidebar .docs-logo>img.is-5by4{padding-top:80%}.image.is-4by3,#documenter .docs-sidebar .docs-logo>img.is-4by3{padding-top:75%}.image.is-3by2,#documenter .docs-sidebar .docs-logo>img.is-3by2{padding-top:66.6666%}.image.is-5by3,#documenter .docs-sidebar .docs-logo>img.is-5by3{padding-top:60%}.image.is-16by9,#documenter .docs-sidebar .docs-logo>img.is-16by9{padding-top:56.25%}.image.is-2by1,#documenter .docs-sidebar .docs-logo>img.is-2by1{padding-top:50%}.image.is-3by1,#documenter .docs-sidebar .docs-logo>img.is-3by1{padding-top:33.3333%}.image.is-4by5,#documenter .docs-sidebar .docs-logo>img.is-4by5{padding-top:125%}.image.is-3by4,#documenter .docs-sidebar .docs-logo>img.is-3by4{padding-top:133.3333%}.image.is-2by3,#documenter .docs-sidebar .docs-logo>img.is-2by3{padding-top:150%}.image.is-3by5,#documenter .docs-sidebar .docs-logo>img.is-3by5{padding-top:166.6666%}.image.is-9by16,#documenter .docs-sidebar .docs-logo>img.is-9by16{padding-top:177.7777%}.image.is-1by2,#documenter .docs-sidebar .docs-logo>img.is-1by2{padding-top:200%}.image.is-1by3,#documenter .docs-sidebar .docs-logo>img.is-1by3{padding-top:300%}.image.is-16x16,#documenter .docs-sidebar .docs-logo>img.is-16x16{height:16px;width:16px}.image.is-24x24,#documenter .docs-sidebar .docs-logo>img.is-24x24{height:24px;width:24px}.image.is-32x32,#documenter .docs-sidebar .docs-logo>img.is-32x32{height:32px;width:32px}.image.is-48x48,#documenter .docs-sidebar .docs-logo>img.is-48x48{height:48px;width:48px}.image.is-64x64,#documenter .docs-sidebar .docs-logo>img.is-64x64{height:64px;width:64px}.image.is-96x96,#documenter .docs-sidebar .docs-logo>img.is-96x96{height:96px;width:96px}.image.is-128x128,#documenter .docs-sidebar .docs-logo>img.is-128x128{height:128px;width:128px}.notification{background-color:#f5f5f5;border-radius:4px;position:relative;padding:1.25rem 2.5rem 1.25rem 1.5rem}.notification a:not(.button):not(.dropdown-item){color:currentColor;text-decoration:underline}.notification strong{color:currentColor}.notification code,.notification pre{background:#fff}.notification pre code{background:transparent}.notification>.delete{right:.5rem;position:absolute;top:0.5rem}.notification .title,.notification .subtitle,.notification .content{color:currentColor}.notification.is-white{background-color:#fff;color:#0a0a0a}.notification.is-black{background-color:#0a0a0a;color:#fff}.notification.is-light{background-color:#f5f5f5;color:rgba(0,0,0,0.7)}.notification.is-dark,.content 
kbd.notification{background-color:#363636;color:#fff}.notification.is-primary,.docstring>section>a.notification.docs-sourcelink{background-color:#4eb5de;color:#fff}.notification.is-primary.is-light,.docstring>section>a.notification.is-light.docs-sourcelink{background-color:#eef8fc;color:#1a6d8e}.notification.is-link{background-color:#2e63b8;color:#fff}.notification.is-link.is-light{background-color:#eff3fb;color:#3169c4}.notification.is-info{background-color:#209cee;color:#fff}.notification.is-info.is-light{background-color:#ecf7fe;color:#0e72b4}.notification.is-success{background-color:#22c35b;color:#fff}.notification.is-success.is-light{background-color:#eefcf3;color:#198f43}.notification.is-warning{background-color:#ffdd57;color:rgba(0,0,0,0.7)}.notification.is-warning.is-light{background-color:#fffbeb;color:#947600}.notification.is-danger{background-color:#da0b00;color:#fff}.notification.is-danger.is-light{background-color:#ffeceb;color:#f50c00}.progress{-moz-appearance:none;-webkit-appearance:none;border:none;border-radius:9999px;display:block;height:1rem;overflow:hidden;padding:0;width:100%}.progress::-webkit-progress-bar{background-color:#ededed}.progress::-webkit-progress-value{background-color:#222}.progress::-moz-progress-bar{background-color:#222}.progress::-ms-fill{background-color:#222;border:none}.progress.is-white::-webkit-progress-value{background-color:#fff}.progress.is-white::-moz-progress-bar{background-color:#fff}.progress.is-white::-ms-fill{background-color:#fff}.progress.is-white:indeterminate{background-image:linear-gradient(to right, #fff 30%, #ededed 30%)}.progress.is-black::-webkit-progress-value{background-color:#0a0a0a}.progress.is-black::-moz-progress-bar{background-color:#0a0a0a}.progress.is-black::-ms-fill{background-color:#0a0a0a}.progress.is-black:indeterminate{background-image:linear-gradient(to right, #0a0a0a 30%, #ededed 30%)}.progress.is-light::-webkit-progress-value{background-color:#f5f5f5}.progress.is-light::-moz-progress-bar{background-color:#f5f5f5}.progress.is-light::-ms-fill{background-color:#f5f5f5}.progress.is-light:indeterminate{background-image:linear-gradient(to right, #f5f5f5 30%, #ededed 30%)}.progress.is-dark::-webkit-progress-value,.content kbd.progress::-webkit-progress-value{background-color:#363636}.progress.is-dark::-moz-progress-bar,.content kbd.progress::-moz-progress-bar{background-color:#363636}.progress.is-dark::-ms-fill,.content kbd.progress::-ms-fill{background-color:#363636}.progress.is-dark:indeterminate,.content kbd.progress:indeterminate{background-image:linear-gradient(to right, #363636 30%, #ededed 30%)}.progress.is-primary::-webkit-progress-value,.docstring>section>a.progress.docs-sourcelink::-webkit-progress-value{background-color:#4eb5de}.progress.is-primary::-moz-progress-bar,.docstring>section>a.progress.docs-sourcelink::-moz-progress-bar{background-color:#4eb5de}.progress.is-primary::-ms-fill,.docstring>section>a.progress.docs-sourcelink::-ms-fill{background-color:#4eb5de}.progress.is-primary:indeterminate,.docstring>section>a.progress.docs-sourcelink:indeterminate{background-image:linear-gradient(to right, #4eb5de 30%, #ededed 30%)}.progress.is-link::-webkit-progress-value{background-color:#2e63b8}.progress.is-link::-moz-progress-bar{background-color:#2e63b8}.progress.is-link::-ms-fill{background-color:#2e63b8}.progress.is-link:indeterminate{background-image:linear-gradient(to right, #2e63b8 30%, #ededed 
30%)}.progress.is-info::-webkit-progress-value{background-color:#209cee}.progress.is-info::-moz-progress-bar{background-color:#209cee}.progress.is-info::-ms-fill{background-color:#209cee}.progress.is-info:indeterminate{background-image:linear-gradient(to right, #209cee 30%, #ededed 30%)}.progress.is-success::-webkit-progress-value{background-color:#22c35b}.progress.is-success::-moz-progress-bar{background-color:#22c35b}.progress.is-success::-ms-fill{background-color:#22c35b}.progress.is-success:indeterminate{background-image:linear-gradient(to right, #22c35b 30%, #ededed 30%)}.progress.is-warning::-webkit-progress-value{background-color:#ffdd57}.progress.is-warning::-moz-progress-bar{background-color:#ffdd57}.progress.is-warning::-ms-fill{background-color:#ffdd57}.progress.is-warning:indeterminate{background-image:linear-gradient(to right, #ffdd57 30%, #ededed 30%)}.progress.is-danger::-webkit-progress-value{background-color:#da0b00}.progress.is-danger::-moz-progress-bar{background-color:#da0b00}.progress.is-danger::-ms-fill{background-color:#da0b00}.progress.is-danger:indeterminate{background-image:linear-gradient(to right, #da0b00 30%, #ededed 30%)}.progress:indeterminate{animation-duration:1.5s;animation-iteration-count:infinite;animation-name:moveIndeterminate;animation-timing-function:linear;background-color:#ededed;background-image:linear-gradient(to right, #222 30%, #ededed 30%);background-position:top left;background-repeat:no-repeat;background-size:150% 150%}.progress:indeterminate::-webkit-progress-bar{background-color:transparent}.progress:indeterminate::-moz-progress-bar{background-color:transparent}.progress:indeterminate::-ms-fill{animation-name:none}.progress.is-small,#documenter .docs-sidebar form.docs-search>input.progress{height:.75rem}.progress.is-medium{height:1.25rem}.progress.is-large{height:1.5rem}@keyframes moveIndeterminate{from{background-position:200% 0}to{background-position:-200% 0}}.table{background-color:#fff;color:#222}.table td,.table th{border:1px solid #dbdbdb;border-width:0 0 1px;padding:0.5em 0.75em;vertical-align:top}.table td.is-white,.table th.is-white{background-color:#fff;border-color:#fff;color:#0a0a0a}.table td.is-black,.table th.is-black{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.table td.is-light,.table th.is-light{background-color:#f5f5f5;border-color:#f5f5f5;color:rgba(0,0,0,0.7)}.table td.is-dark,.table th.is-dark{background-color:#363636;border-color:#363636;color:#fff}.table td.is-primary,.table th.is-primary{background-color:#4eb5de;border-color:#4eb5de;color:#fff}.table td.is-link,.table th.is-link{background-color:#2e63b8;border-color:#2e63b8;color:#fff}.table td.is-info,.table th.is-info{background-color:#209cee;border-color:#209cee;color:#fff}.table td.is-success,.table th.is-success{background-color:#22c35b;border-color:#22c35b;color:#fff}.table td.is-warning,.table th.is-warning{background-color:#ffdd57;border-color:#ffdd57;color:rgba(0,0,0,0.7)}.table td.is-danger,.table th.is-danger{background-color:#da0b00;border-color:#da0b00;color:#fff}.table td.is-narrow,.table th.is-narrow{white-space:nowrap;width:1%}.table td.is-selected,.table th.is-selected{background-color:#4eb5de;color:#fff}.table td.is-selected a,.table td.is-selected strong,.table th.is-selected a,.table th.is-selected strong{color:currentColor}.table td.is-vcentered,.table th.is-vcentered{vertical-align:middle}.table th{color:#222}.table th:not([align]){text-align:left}.table tr.is-selected{background-color:#4eb5de;color:#fff}.table tr.is-selected 
a,.table tr.is-selected strong{color:currentColor}.table tr.is-selected td,.table tr.is-selected th{border-color:#fff;color:currentColor}.table thead{background-color:rgba(0,0,0,0)}.table thead td,.table thead th{border-width:0 0 2px;color:#222}.table tfoot{background-color:rgba(0,0,0,0)}.table tfoot td,.table tfoot th{border-width:2px 0 0;color:#222}.table tbody{background-color:rgba(0,0,0,0)}.table tbody tr:last-child td,.table tbody tr:last-child th{border-bottom-width:0}.table.is-bordered td,.table.is-bordered th{border-width:1px}.table.is-bordered tr:last-child td,.table.is-bordered tr:last-child th{border-bottom-width:1px}.table.is-fullwidth{width:100%}.table.is-hoverable tbody tr:not(.is-selected):hover{background-color:#fafafa}.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover{background-color:#fafafa}.table.is-hoverable.is-striped tbody tr:not(.is-selected):hover:nth-child(even){background-color:#f5f5f5}.table.is-narrow td,.table.is-narrow th{padding:0.25em 0.5em}.table.is-striped tbody tr:not(.is-selected):nth-child(even){background-color:#fafafa}.table-container{-webkit-overflow-scrolling:touch;overflow:auto;overflow-y:hidden;max-width:100%}.tags{align-items:center;display:flex;flex-wrap:wrap;justify-content:flex-start}.tags .tag,.tags .content kbd,.content .tags kbd,.tags .docstring>section>a.docs-sourcelink{margin-bottom:0.5rem}.tags .tag:not(:last-child),.tags .content kbd:not(:last-child),.content .tags kbd:not(:last-child),.tags .docstring>section>a.docs-sourcelink:not(:last-child){margin-right:.5rem}.tags:last-child{margin-bottom:-0.5rem}.tags:not(:last-child){margin-bottom:1rem}.tags.are-medium .tag:not(.is-normal):not(.is-large),.tags.are-medium .content kbd:not(.is-normal):not(.is-large),.content .tags.are-medium kbd:not(.is-normal):not(.is-large),.tags.are-medium .docstring>section>a.docs-sourcelink:not(.is-normal):not(.is-large){font-size:1rem}.tags.are-large .tag:not(.is-normal):not(.is-medium),.tags.are-large .content kbd:not(.is-normal):not(.is-medium),.content .tags.are-large kbd:not(.is-normal):not(.is-medium),.tags.are-large .docstring>section>a.docs-sourcelink:not(.is-normal):not(.is-medium){font-size:1.25rem}.tags.is-centered{justify-content:center}.tags.is-centered .tag,.tags.is-centered .content kbd,.content .tags.is-centered kbd,.tags.is-centered .docstring>section>a.docs-sourcelink{margin-right:0.25rem;margin-left:0.25rem}.tags.is-right{justify-content:flex-end}.tags.is-right .tag:not(:first-child),.tags.is-right .content kbd:not(:first-child),.content .tags.is-right kbd:not(:first-child),.tags.is-right .docstring>section>a.docs-sourcelink:not(:first-child){margin-left:0.5rem}.tags.is-right .tag:not(:last-child),.tags.is-right .content kbd:not(:last-child),.content .tags.is-right kbd:not(:last-child),.tags.is-right .docstring>section>a.docs-sourcelink:not(:last-child){margin-right:0}.tags.has-addons .tag,.tags.has-addons .content kbd,.content .tags.has-addons kbd,.tags.has-addons .docstring>section>a.docs-sourcelink{margin-right:0}.tags.has-addons .tag:not(:first-child),.tags.has-addons .content kbd:not(:first-child),.content .tags.has-addons kbd:not(:first-child),.tags.has-addons .docstring>section>a.docs-sourcelink:not(:first-child){margin-left:0;border-top-left-radius:0;border-bottom-left-radius:0}.tags.has-addons .tag:not(:last-child),.tags.has-addons .content kbd:not(:last-child),.content .tags.has-addons kbd:not(:last-child),.tags.has-addons 
.docstring>section>a.docs-sourcelink:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.tag:not(body),.content kbd:not(body),.docstring>section>a.docs-sourcelink:not(body){align-items:center;background-color:#f5f5f5;border-radius:4px;color:#222;display:inline-flex;font-size:.75rem;height:2em;justify-content:center;line-height:1.5;padding-left:0.75em;padding-right:0.75em;white-space:nowrap}.tag:not(body) .delete,.content kbd:not(body) .delete,.docstring>section>a.docs-sourcelink:not(body) .delete{margin-left:.25rem;margin-right:-.375rem}.tag.is-white:not(body),.content kbd.is-white:not(body),.docstring>section>a.docs-sourcelink.is-white:not(body){background-color:#fff;color:#0a0a0a}.tag.is-black:not(body),.content kbd.is-black:not(body),.docstring>section>a.docs-sourcelink.is-black:not(body){background-color:#0a0a0a;color:#fff}.tag.is-light:not(body),.content kbd.is-light:not(body),.docstring>section>a.docs-sourcelink.is-light:not(body){background-color:#f5f5f5;color:rgba(0,0,0,0.7)}.tag.is-dark:not(body),.content kbd:not(body),.docstring>section>a.docs-sourcelink.is-dark:not(body),.content .docstring>section>kbd:not(body){background-color:#363636;color:#fff}.tag.is-primary:not(body),.content kbd.is-primary:not(body),.docstring>section>a.docs-sourcelink:not(body){background-color:#4eb5de;color:#fff}.tag.is-primary.is-light:not(body),.content kbd.is-primary.is-light:not(body),.docstring>section>a.docs-sourcelink.is-light:not(body){background-color:#eef8fc;color:#1a6d8e}.tag.is-link:not(body),.content kbd.is-link:not(body),.docstring>section>a.docs-sourcelink.is-link:not(body){background-color:#2e63b8;color:#fff}.tag.is-link.is-light:not(body),.content kbd.is-link.is-light:not(body),.docstring>section>a.docs-sourcelink.is-link.is-light:not(body){background-color:#eff3fb;color:#3169c4}.tag.is-info:not(body),.content kbd.is-info:not(body),.docstring>section>a.docs-sourcelink.is-info:not(body){background-color:#209cee;color:#fff}.tag.is-info.is-light:not(body),.content kbd.is-info.is-light:not(body),.docstring>section>a.docs-sourcelink.is-info.is-light:not(body){background-color:#ecf7fe;color:#0e72b4}.tag.is-success:not(body),.content kbd.is-success:not(body),.docstring>section>a.docs-sourcelink.is-success:not(body){background-color:#22c35b;color:#fff}.tag.is-success.is-light:not(body),.content kbd.is-success.is-light:not(body),.docstring>section>a.docs-sourcelink.is-success.is-light:not(body){background-color:#eefcf3;color:#198f43}.tag.is-warning:not(body),.content kbd.is-warning:not(body),.docstring>section>a.docs-sourcelink.is-warning:not(body){background-color:#ffdd57;color:rgba(0,0,0,0.7)}.tag.is-warning.is-light:not(body),.content kbd.is-warning.is-light:not(body),.docstring>section>a.docs-sourcelink.is-warning.is-light:not(body){background-color:#fffbeb;color:#947600}.tag.is-danger:not(body),.content kbd.is-danger:not(body),.docstring>section>a.docs-sourcelink.is-danger:not(body){background-color:#da0b00;color:#fff}.tag.is-danger.is-light:not(body),.content kbd.is-danger.is-light:not(body),.docstring>section>a.docs-sourcelink.is-danger.is-light:not(body){background-color:#ffeceb;color:#f50c00}.tag.is-normal:not(body),.content kbd.is-normal:not(body),.docstring>section>a.docs-sourcelink.is-normal:not(body){font-size:.75rem}.tag.is-medium:not(body),.content kbd.is-medium:not(body),.docstring>section>a.docs-sourcelink.is-medium:not(body){font-size:1rem}.tag.is-large:not(body),.content 
kbd.is-large:not(body),.docstring>section>a.docs-sourcelink.is-large:not(body){font-size:1.25rem}.tag:not(body) .icon:first-child:not(:last-child),.content kbd:not(body) .icon:first-child:not(:last-child),.docstring>section>a.docs-sourcelink:not(body) .icon:first-child:not(:last-child){margin-left:-.375em;margin-right:.1875em}.tag:not(body) .icon:last-child:not(:first-child),.content kbd:not(body) .icon:last-child:not(:first-child),.docstring>section>a.docs-sourcelink:not(body) .icon:last-child:not(:first-child){margin-left:.1875em;margin-right:-.375em}.tag:not(body) .icon:first-child:last-child,.content kbd:not(body) .icon:first-child:last-child,.docstring>section>a.docs-sourcelink:not(body) .icon:first-child:last-child{margin-left:-.375em;margin-right:-.375em}.tag.is-delete:not(body),.content kbd.is-delete:not(body),.docstring>section>a.docs-sourcelink.is-delete:not(body){margin-left:1px;padding:0;position:relative;width:2em}.tag.is-delete:not(body)::before,.content kbd.is-delete:not(body)::before,.docstring>section>a.docs-sourcelink.is-delete:not(body)::before,.tag.is-delete:not(body)::after,.content kbd.is-delete:not(body)::after,.docstring>section>a.docs-sourcelink.is-delete:not(body)::after{background-color:currentColor;content:"";display:block;left:50%;position:absolute;top:50%;transform:translateX(-50%) translateY(-50%) rotate(45deg);transform-origin:center center}.tag.is-delete:not(body)::before,.content kbd.is-delete:not(body)::before,.docstring>section>a.docs-sourcelink.is-delete:not(body)::before{height:1px;width:50%}.tag.is-delete:not(body)::after,.content kbd.is-delete:not(body)::after,.docstring>section>a.docs-sourcelink.is-delete:not(body)::after{height:50%;width:1px}.tag.is-delete:not(body):hover,.content kbd.is-delete:not(body):hover,.docstring>section>a.docs-sourcelink.is-delete:not(body):hover,.tag.is-delete:not(body):focus,.content kbd.is-delete:not(body):focus,.docstring>section>a.docs-sourcelink.is-delete:not(body):focus{background-color:#e8e8e8}.tag.is-delete:not(body):active,.content kbd.is-delete:not(body):active,.docstring>section>a.docs-sourcelink.is-delete:not(body):active{background-color:#dbdbdb}.tag.is-rounded:not(body),#documenter .docs-sidebar form.docs-search>input:not(body),.content kbd.is-rounded:not(body),#documenter .docs-sidebar .content form.docs-search>input:not(body),.docstring>section>a.docs-sourcelink.is-rounded:not(body){border-radius:9999px}a.tag:hover,.docstring>section>a.docs-sourcelink:hover{text-decoration:underline}.title,.subtitle{word-break:break-word}.title em,.title span,.subtitle em,.subtitle span{font-weight:inherit}.title sub,.subtitle sub{font-size:.75em}.title sup,.subtitle sup{font-size:.75em}.title .tag,.title .content kbd,.content .title kbd,.title .docstring>section>a.docs-sourcelink,.subtitle .tag,.subtitle .content kbd,.content .subtitle kbd,.subtitle .docstring>section>a.docs-sourcelink{vertical-align:middle}.title{color:#222;font-size:2rem;font-weight:600;line-height:1.125}.title strong{color:inherit;font-weight:inherit}.title:not(.is-spaced)+.subtitle{margin-top:-1.25rem}.title.is-1{font-size:3rem}.title.is-2{font-size:2.5rem}.title.is-3{font-size:2rem}.title.is-4{font-size:1.5rem}.title.is-5{font-size:1.25rem}.title.is-6{font-size:1rem}.title.is-7{font-size:.75rem}.subtitle{color:#222;font-size:1.25rem;font-weight:400;line-height:1.25}.subtitle 
strong{color:#222;font-weight:600}.subtitle:not(.is-spaced)+.title{margin-top:-1.25rem}.subtitle.is-1{font-size:3rem}.subtitle.is-2{font-size:2.5rem}.subtitle.is-3{font-size:2rem}.subtitle.is-4{font-size:1.5rem}.subtitle.is-5{font-size:1.25rem}.subtitle.is-6{font-size:1rem}.subtitle.is-7{font-size:.75rem}.heading{display:block;font-size:11px;letter-spacing:1px;margin-bottom:5px;text-transform:uppercase}.number{align-items:center;background-color:#f5f5f5;border-radius:9999px;display:inline-flex;font-size:1.25rem;height:2em;justify-content:center;margin-right:1.5rem;min-width:2.5em;padding:0.25rem 0.5rem;text-align:center;vertical-align:top}.select select,.textarea,.input,#documenter .docs-sidebar form.docs-search>input{background-color:#fff;border-color:#dbdbdb;border-radius:4px;color:#222}.select select::-moz-placeholder,.textarea::-moz-placeholder,.input::-moz-placeholder,#documenter .docs-sidebar form.docs-search>input::-moz-placeholder{color:#707070}.select select::-webkit-input-placeholder,.textarea::-webkit-input-placeholder,.input::-webkit-input-placeholder,#documenter .docs-sidebar form.docs-search>input::-webkit-input-placeholder{color:#707070}.select select:-moz-placeholder,.textarea:-moz-placeholder,.input:-moz-placeholder,#documenter .docs-sidebar form.docs-search>input:-moz-placeholder{color:#707070}.select select:-ms-input-placeholder,.textarea:-ms-input-placeholder,.input:-ms-input-placeholder,#documenter .docs-sidebar form.docs-search>input:-ms-input-placeholder{color:#707070}.select select:hover,.textarea:hover,.input:hover,#documenter .docs-sidebar form.docs-search>input:hover,.select select.is-hovered,.is-hovered.textarea,.is-hovered.input,#documenter .docs-sidebar form.docs-search>input.is-hovered{border-color:#b5b5b5}.select select:focus,.textarea:focus,.input:focus,#documenter .docs-sidebar form.docs-search>input:focus,.select select.is-focused,.is-focused.textarea,.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.select select:active,.textarea:active,.input:active,#documenter .docs-sidebar form.docs-search>input:active,.select select.is-active,.is-active.textarea,.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active{border-color:#2e63b8;box-shadow:0 0 0 0.125em rgba(46,99,184,0.25)}.select select[disabled],.textarea[disabled],.input[disabled],#documenter .docs-sidebar form.docs-search>input[disabled],fieldset[disabled] .select select,.select fieldset[disabled] select,fieldset[disabled] .textarea,fieldset[disabled] .input,fieldset[disabled] #documenter .docs-sidebar form.docs-search>input,#documenter .docs-sidebar fieldset[disabled] form.docs-search>input{background-color:#f5f5f5;border-color:#f5f5f5;box-shadow:none;color:#6b6b6b}.select select[disabled]::-moz-placeholder,.textarea[disabled]::-moz-placeholder,.input[disabled]::-moz-placeholder,#documenter .docs-sidebar form.docs-search>input[disabled]::-moz-placeholder,fieldset[disabled] .select select::-moz-placeholder,.select fieldset[disabled] select::-moz-placeholder,fieldset[disabled] .textarea::-moz-placeholder,fieldset[disabled] .input::-moz-placeholder,fieldset[disabled] #documenter .docs-sidebar form.docs-search>input::-moz-placeholder,#documenter .docs-sidebar fieldset[disabled] form.docs-search>input::-moz-placeholder{color:rgba(107,107,107,0.3)}.select select[disabled]::-webkit-input-placeholder,.textarea[disabled]::-webkit-input-placeholder,.input[disabled]::-webkit-input-placeholder,#documenter .docs-sidebar 
form.docs-search>input[disabled]::-webkit-input-placeholder,fieldset[disabled] .select select::-webkit-input-placeholder,.select fieldset[disabled] select::-webkit-input-placeholder,fieldset[disabled] .textarea::-webkit-input-placeholder,fieldset[disabled] .input::-webkit-input-placeholder,fieldset[disabled] #documenter .docs-sidebar form.docs-search>input::-webkit-input-placeholder,#documenter .docs-sidebar fieldset[disabled] form.docs-search>input::-webkit-input-placeholder{color:rgba(107,107,107,0.3)}.select select[disabled]:-moz-placeholder,.textarea[disabled]:-moz-placeholder,.input[disabled]:-moz-placeholder,#documenter .docs-sidebar form.docs-search>input[disabled]:-moz-placeholder,fieldset[disabled] .select select:-moz-placeholder,.select fieldset[disabled] select:-moz-placeholder,fieldset[disabled] .textarea:-moz-placeholder,fieldset[disabled] .input:-moz-placeholder,fieldset[disabled] #documenter .docs-sidebar form.docs-search>input:-moz-placeholder,#documenter .docs-sidebar fieldset[disabled] form.docs-search>input:-moz-placeholder{color:rgba(107,107,107,0.3)}.select select[disabled]:-ms-input-placeholder,.textarea[disabled]:-ms-input-placeholder,.input[disabled]:-ms-input-placeholder,#documenter .docs-sidebar form.docs-search>input[disabled]:-ms-input-placeholder,fieldset[disabled] .select select:-ms-input-placeholder,.select fieldset[disabled] select:-ms-input-placeholder,fieldset[disabled] .textarea:-ms-input-placeholder,fieldset[disabled] .input:-ms-input-placeholder,fieldset[disabled] #documenter .docs-sidebar form.docs-search>input:-ms-input-placeholder,#documenter .docs-sidebar fieldset[disabled] form.docs-search>input:-ms-input-placeholder{color:rgba(107,107,107,0.3)}.textarea,.input,#documenter .docs-sidebar form.docs-search>input{box-shadow:inset 0 0.0625em 0.125em rgba(10,10,10,0.05);max-width:100%;width:100%}.textarea[readonly],.input[readonly],#documenter .docs-sidebar form.docs-search>input[readonly]{box-shadow:none}.is-white.textarea,.is-white.input,#documenter .docs-sidebar form.docs-search>input.is-white{border-color:#fff}.is-white.textarea:focus,.is-white.input:focus,#documenter .docs-sidebar form.docs-search>input.is-white:focus,.is-white.is-focused.textarea,.is-white.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.is-white.textarea:active,.is-white.input:active,#documenter .docs-sidebar form.docs-search>input.is-white:active,.is-white.is-active.textarea,.is-white.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(255,255,255,0.25)}.is-black.textarea,.is-black.input,#documenter .docs-sidebar form.docs-search>input.is-black{border-color:#0a0a0a}.is-black.textarea:focus,.is-black.input:focus,#documenter .docs-sidebar form.docs-search>input.is-black:focus,.is-black.is-focused.textarea,.is-black.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.is-black.textarea:active,.is-black.input:active,#documenter .docs-sidebar form.docs-search>input.is-black:active,.is-black.is-active.textarea,.is-black.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(10,10,10,0.25)}.is-light.textarea,.is-light.input,#documenter .docs-sidebar form.docs-search>input.is-light{border-color:#f5f5f5}.is-light.textarea:focus,.is-light.input:focus,#documenter .docs-sidebar form.docs-search>input.is-light:focus,.is-light.is-focused.textarea,.is-light.is-focused.input,#documenter .docs-sidebar 
form.docs-search>input.is-focused,.is-light.textarea:active,.is-light.input:active,#documenter .docs-sidebar form.docs-search>input.is-light:active,.is-light.is-active.textarea,.is-light.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(245,245,245,0.25)}.is-dark.textarea,.content kbd.textarea,.is-dark.input,#documenter .docs-sidebar form.docs-search>input.is-dark,.content kbd.input{border-color:#363636}.is-dark.textarea:focus,.content kbd.textarea:focus,.is-dark.input:focus,#documenter .docs-sidebar form.docs-search>input.is-dark:focus,.content kbd.input:focus,.is-dark.is-focused.textarea,.content kbd.is-focused.textarea,.is-dark.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.content kbd.is-focused.input,#documenter .docs-sidebar .content form.docs-search>input.is-focused,.is-dark.textarea:active,.content kbd.textarea:active,.is-dark.input:active,#documenter .docs-sidebar form.docs-search>input.is-dark:active,.content kbd.input:active,.is-dark.is-active.textarea,.content kbd.is-active.textarea,.is-dark.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active,.content kbd.is-active.input,#documenter .docs-sidebar .content form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(54,54,54,0.25)}.is-primary.textarea,.docstring>section>a.textarea.docs-sourcelink,.is-primary.input,#documenter .docs-sidebar form.docs-search>input.is-primary,.docstring>section>a.input.docs-sourcelink{border-color:#4eb5de}.is-primary.textarea:focus,.docstring>section>a.textarea.docs-sourcelink:focus,.is-primary.input:focus,#documenter .docs-sidebar form.docs-search>input.is-primary:focus,.docstring>section>a.input.docs-sourcelink:focus,.is-primary.is-focused.textarea,.docstring>section>a.is-focused.textarea.docs-sourcelink,.is-primary.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.docstring>section>a.is-focused.input.docs-sourcelink,.is-primary.textarea:active,.docstring>section>a.textarea.docs-sourcelink:active,.is-primary.input:active,#documenter .docs-sidebar form.docs-search>input.is-primary:active,.docstring>section>a.input.docs-sourcelink:active,.is-primary.is-active.textarea,.docstring>section>a.is-active.textarea.docs-sourcelink,.is-primary.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active,.docstring>section>a.is-active.input.docs-sourcelink{box-shadow:0 0 0 0.125em rgba(78,181,222,0.25)}.is-link.textarea,.is-link.input,#documenter .docs-sidebar form.docs-search>input.is-link{border-color:#2e63b8}.is-link.textarea:focus,.is-link.input:focus,#documenter .docs-sidebar form.docs-search>input.is-link:focus,.is-link.is-focused.textarea,.is-link.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.is-link.textarea:active,.is-link.input:active,#documenter .docs-sidebar form.docs-search>input.is-link:active,.is-link.is-active.textarea,.is-link.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(46,99,184,0.25)}.is-info.textarea,.is-info.input,#documenter .docs-sidebar form.docs-search>input.is-info{border-color:#209cee}.is-info.textarea:focus,.is-info.input:focus,#documenter .docs-sidebar form.docs-search>input.is-info:focus,.is-info.is-focused.textarea,.is-info.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.is-info.textarea:active,.is-info.input:active,#documenter .docs-sidebar 
form.docs-search>input.is-info:active,.is-info.is-active.textarea,.is-info.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(32,156,238,0.25)}.is-success.textarea,.is-success.input,#documenter .docs-sidebar form.docs-search>input.is-success{border-color:#22c35b}.is-success.textarea:focus,.is-success.input:focus,#documenter .docs-sidebar form.docs-search>input.is-success:focus,.is-success.is-focused.textarea,.is-success.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.is-success.textarea:active,.is-success.input:active,#documenter .docs-sidebar form.docs-search>input.is-success:active,.is-success.is-active.textarea,.is-success.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(34,195,91,0.25)}.is-warning.textarea,.is-warning.input,#documenter .docs-sidebar form.docs-search>input.is-warning{border-color:#ffdd57}.is-warning.textarea:focus,.is-warning.input:focus,#documenter .docs-sidebar form.docs-search>input.is-warning:focus,.is-warning.is-focused.textarea,.is-warning.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.is-warning.textarea:active,.is-warning.input:active,#documenter .docs-sidebar form.docs-search>input.is-warning:active,.is-warning.is-active.textarea,.is-warning.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(255,221,87,0.25)}.is-danger.textarea,.is-danger.input,#documenter .docs-sidebar form.docs-search>input.is-danger{border-color:#da0b00}.is-danger.textarea:focus,.is-danger.input:focus,#documenter .docs-sidebar form.docs-search>input.is-danger:focus,.is-danger.is-focused.textarea,.is-danger.is-focused.input,#documenter .docs-sidebar form.docs-search>input.is-focused,.is-danger.textarea:active,.is-danger.input:active,#documenter .docs-sidebar form.docs-search>input.is-danger:active,.is-danger.is-active.textarea,.is-danger.is-active.input,#documenter .docs-sidebar form.docs-search>input.is-active{box-shadow:0 0 0 0.125em rgba(218,11,0,0.25)}.is-small.textarea,.is-small.input,#documenter .docs-sidebar form.docs-search>input{border-radius:2px;font-size:.75rem}.is-medium.textarea,.is-medium.input,#documenter .docs-sidebar form.docs-search>input.is-medium{font-size:1.25rem}.is-large.textarea,.is-large.input,#documenter .docs-sidebar form.docs-search>input.is-large{font-size:1.5rem}.is-fullwidth.textarea,.is-fullwidth.input,#documenter .docs-sidebar form.docs-search>input.is-fullwidth{display:block;width:100%}.is-inline.textarea,.is-inline.input,#documenter .docs-sidebar form.docs-search>input.is-inline{display:inline;width:auto}.input.is-rounded,#documenter .docs-sidebar form.docs-search>input{border-radius:9999px;padding-left:calc(calc(0.75em - 1px) + 0.375em);padding-right:calc(calc(0.75em - 1px) + 0.375em)}.input.is-static,#documenter .docs-sidebar form.docs-search>input.is-static{background-color:transparent;border-color:transparent;box-shadow:none;padding-left:0;padding-right:0}.textarea{display:block;max-width:100%;min-width:100%;padding:calc(0.75em - 1px);resize:vertical}.textarea:not([rows]){max-height:40em;min-height:8em}.textarea[rows]{height:initial}.textarea.has-fixed-size{resize:none}.radio,.checkbox{cursor:pointer;display:inline-block;line-height:1.25;position:relative}.radio input,.checkbox input{cursor:pointer}.radio:hover,.checkbox:hover{color:#222}.radio[disabled],.checkbox[disabled],fieldset[disabled] .radio,fieldset[disabled] 
.checkbox,.radio input[disabled],.checkbox input[disabled]{color:#6b6b6b;cursor:not-allowed}.radio+.radio{margin-left:.5em}.select{display:inline-block;max-width:100%;position:relative;vertical-align:top}.select:not(.is-multiple){height:2.5em}.select:not(.is-multiple):not(.is-loading)::after{border-color:#2e63b8;right:1.125em;z-index:4}.select.is-rounded select,#documenter .docs-sidebar form.docs-search>input.select select{border-radius:9999px;padding-left:1em}.select select{cursor:pointer;display:block;font-size:1em;max-width:100%;outline:none}.select select::-ms-expand{display:none}.select select[disabled]:hover,fieldset[disabled] .select select:hover{border-color:#f5f5f5}.select select:not([multiple]){padding-right:2.5em}.select select[multiple]{height:auto;padding:0}.select select[multiple] option{padding:0.5em 1em}.select:not(.is-multiple):not(.is-loading):hover::after{border-color:#222}.select.is-white:not(:hover)::after{border-color:#fff}.select.is-white select{border-color:#fff}.select.is-white select:hover,.select.is-white select.is-hovered{border-color:#f2f2f2}.select.is-white select:focus,.select.is-white select.is-focused,.select.is-white select:active,.select.is-white select.is-active{box-shadow:0 0 0 0.125em rgba(255,255,255,0.25)}.select.is-black:not(:hover)::after{border-color:#0a0a0a}.select.is-black select{border-color:#0a0a0a}.select.is-black select:hover,.select.is-black select.is-hovered{border-color:#000}.select.is-black select:focus,.select.is-black select.is-focused,.select.is-black select:active,.select.is-black select.is-active{box-shadow:0 0 0 0.125em rgba(10,10,10,0.25)}.select.is-light:not(:hover)::after{border-color:#f5f5f5}.select.is-light select{border-color:#f5f5f5}.select.is-light select:hover,.select.is-light select.is-hovered{border-color:#e8e8e8}.select.is-light select:focus,.select.is-light select.is-focused,.select.is-light select:active,.select.is-light select.is-active{box-shadow:0 0 0 0.125em rgba(245,245,245,0.25)}.select.is-dark:not(:hover)::after,.content kbd.select:not(:hover)::after{border-color:#363636}.select.is-dark select,.content kbd.select select{border-color:#363636}.select.is-dark select:hover,.content kbd.select select:hover,.select.is-dark select.is-hovered,.content kbd.select select.is-hovered{border-color:#292929}.select.is-dark select:focus,.content kbd.select select:focus,.select.is-dark select.is-focused,.content kbd.select select.is-focused,.select.is-dark select:active,.content kbd.select select:active,.select.is-dark select.is-active,.content kbd.select select.is-active{box-shadow:0 0 0 0.125em rgba(54,54,54,0.25)}.select.is-primary:not(:hover)::after,.docstring>section>a.select.docs-sourcelink:not(:hover)::after{border-color:#4eb5de}.select.is-primary select,.docstring>section>a.select.docs-sourcelink select{border-color:#4eb5de}.select.is-primary select:hover,.docstring>section>a.select.docs-sourcelink select:hover,.select.is-primary select.is-hovered,.docstring>section>a.select.docs-sourcelink select.is-hovered{border-color:#39acda}.select.is-primary select:focus,.docstring>section>a.select.docs-sourcelink select:focus,.select.is-primary select.is-focused,.docstring>section>a.select.docs-sourcelink select.is-focused,.select.is-primary select:active,.docstring>section>a.select.docs-sourcelink select:active,.select.is-primary select.is-active,.docstring>section>a.select.docs-sourcelink select.is-active{box-shadow:0 0 0 0.125em rgba(78,181,222,0.25)}.select.is-link:not(:hover)::after{border-color:#2e63b8}.select.is-link 
select{border-color:#2e63b8}.select.is-link select:hover,.select.is-link select.is-hovered{border-color:#2958a4}.select.is-link select:focus,.select.is-link select.is-focused,.select.is-link select:active,.select.is-link select.is-active{box-shadow:0 0 0 0.125em rgba(46,99,184,0.25)}.select.is-info:not(:hover)::after{border-color:#209cee}.select.is-info select{border-color:#209cee}.select.is-info select:hover,.select.is-info select.is-hovered{border-color:#1190e3}.select.is-info select:focus,.select.is-info select.is-focused,.select.is-info select:active,.select.is-info select.is-active{box-shadow:0 0 0 0.125em rgba(32,156,238,0.25)}.select.is-success:not(:hover)::after{border-color:#22c35b}.select.is-success select{border-color:#22c35b}.select.is-success select:hover,.select.is-success select.is-hovered{border-color:#1ead51}.select.is-success select:focus,.select.is-success select.is-focused,.select.is-success select:active,.select.is-success select.is-active{box-shadow:0 0 0 0.125em rgba(34,195,91,0.25)}.select.is-warning:not(:hover)::after{border-color:#ffdd57}.select.is-warning select{border-color:#ffdd57}.select.is-warning select:hover,.select.is-warning select.is-hovered{border-color:#ffd83e}.select.is-warning select:focus,.select.is-warning select.is-focused,.select.is-warning select:active,.select.is-warning select.is-active{box-shadow:0 0 0 0.125em rgba(255,221,87,0.25)}.select.is-danger:not(:hover)::after{border-color:#da0b00}.select.is-danger select{border-color:#da0b00}.select.is-danger select:hover,.select.is-danger select.is-hovered{border-color:#c10a00}.select.is-danger select:focus,.select.is-danger select.is-focused,.select.is-danger select:active,.select.is-danger select.is-active{box-shadow:0 0 0 0.125em rgba(218,11,0,0.25)}.select.is-small,#documenter .docs-sidebar form.docs-search>input.select{border-radius:2px;font-size:.75rem}.select.is-medium{font-size:1.25rem}.select.is-large{font-size:1.5rem}.select.is-disabled::after{border-color:#6b6b6b !important;opacity:0.5}.select.is-fullwidth{width:100%}.select.is-fullwidth select{width:100%}.select.is-loading::after{margin-top:0;position:absolute;right:.625em;top:0.625em;transform:none}.select.is-loading.is-small:after,#documenter .docs-sidebar form.docs-search>input.is-loading:after{font-size:.75rem}.select.is-loading.is-medium:after{font-size:1.25rem}.select.is-loading.is-large:after{font-size:1.5rem}.file{align-items:stretch;display:flex;justify-content:flex-start;position:relative}.file.is-white .file-cta{background-color:#fff;border-color:transparent;color:#0a0a0a}.file.is-white:hover .file-cta,.file.is-white.is-hovered .file-cta{background-color:#f9f9f9;border-color:transparent;color:#0a0a0a}.file.is-white:focus .file-cta,.file.is-white.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(255,255,255,0.25);color:#0a0a0a}.file.is-white:active .file-cta,.file.is-white.is-active .file-cta{background-color:#f2f2f2;border-color:transparent;color:#0a0a0a}.file.is-black .file-cta{background-color:#0a0a0a;border-color:transparent;color:#fff}.file.is-black:hover .file-cta,.file.is-black.is-hovered .file-cta{background-color:#040404;border-color:transparent;color:#fff}.file.is-black:focus .file-cta,.file.is-black.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(10,10,10,0.25);color:#fff}.file.is-black:active .file-cta,.file.is-black.is-active .file-cta{background-color:#000;border-color:transparent;color:#fff}.file.is-light 
.file-cta{background-color:#f5f5f5;border-color:transparent;color:rgba(0,0,0,0.7)}.file.is-light:hover .file-cta,.file.is-light.is-hovered .file-cta{background-color:#eee;border-color:transparent;color:rgba(0,0,0,0.7)}.file.is-light:focus .file-cta,.file.is-light.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(245,245,245,0.25);color:rgba(0,0,0,0.7)}.file.is-light:active .file-cta,.file.is-light.is-active .file-cta{background-color:#e8e8e8;border-color:transparent;color:rgba(0,0,0,0.7)}.file.is-dark .file-cta,.content kbd.file .file-cta{background-color:#363636;border-color:transparent;color:#fff}.file.is-dark:hover .file-cta,.content kbd.file:hover .file-cta,.file.is-dark.is-hovered .file-cta,.content kbd.file.is-hovered .file-cta{background-color:#2f2f2f;border-color:transparent;color:#fff}.file.is-dark:focus .file-cta,.content kbd.file:focus .file-cta,.file.is-dark.is-focused .file-cta,.content kbd.file.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(54,54,54,0.25);color:#fff}.file.is-dark:active .file-cta,.content kbd.file:active .file-cta,.file.is-dark.is-active .file-cta,.content kbd.file.is-active .file-cta{background-color:#292929;border-color:transparent;color:#fff}.file.is-primary .file-cta,.docstring>section>a.file.docs-sourcelink .file-cta{background-color:#4eb5de;border-color:transparent;color:#fff}.file.is-primary:hover .file-cta,.docstring>section>a.file.docs-sourcelink:hover .file-cta,.file.is-primary.is-hovered .file-cta,.docstring>section>a.file.is-hovered.docs-sourcelink .file-cta{background-color:#43b1dc;border-color:transparent;color:#fff}.file.is-primary:focus .file-cta,.docstring>section>a.file.docs-sourcelink:focus .file-cta,.file.is-primary.is-focused .file-cta,.docstring>section>a.file.is-focused.docs-sourcelink .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(78,181,222,0.25);color:#fff}.file.is-primary:active .file-cta,.docstring>section>a.file.docs-sourcelink:active .file-cta,.file.is-primary.is-active .file-cta,.docstring>section>a.file.is-active.docs-sourcelink .file-cta{background-color:#39acda;border-color:transparent;color:#fff}.file.is-link .file-cta{background-color:#2e63b8;border-color:transparent;color:#fff}.file.is-link:hover .file-cta,.file.is-link.is-hovered .file-cta{background-color:#2b5eae;border-color:transparent;color:#fff}.file.is-link:focus .file-cta,.file.is-link.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(46,99,184,0.25);color:#fff}.file.is-link:active .file-cta,.file.is-link.is-active .file-cta{background-color:#2958a4;border-color:transparent;color:#fff}.file.is-info .file-cta{background-color:#209cee;border-color:transparent;color:#fff}.file.is-info:hover .file-cta,.file.is-info.is-hovered .file-cta{background-color:#1497ed;border-color:transparent;color:#fff}.file.is-info:focus .file-cta,.file.is-info.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(32,156,238,0.25);color:#fff}.file.is-info:active .file-cta,.file.is-info.is-active .file-cta{background-color:#1190e3;border-color:transparent;color:#fff}.file.is-success .file-cta{background-color:#22c35b;border-color:transparent;color:#fff}.file.is-success:hover .file-cta,.file.is-success.is-hovered .file-cta{background-color:#20b856;border-color:transparent;color:#fff}.file.is-success:focus .file-cta,.file.is-success.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(34,195,91,0.25);color:#fff}.file.is-success:active .file-cta,.file.is-success.is-active 
.file-cta{background-color:#1ead51;border-color:transparent;color:#fff}.file.is-warning .file-cta{background-color:#ffdd57;border-color:transparent;color:rgba(0,0,0,0.7)}.file.is-warning:hover .file-cta,.file.is-warning.is-hovered .file-cta{background-color:#ffda4a;border-color:transparent;color:rgba(0,0,0,0.7)}.file.is-warning:focus .file-cta,.file.is-warning.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(255,221,87,0.25);color:rgba(0,0,0,0.7)}.file.is-warning:active .file-cta,.file.is-warning.is-active .file-cta{background-color:#ffd83e;border-color:transparent;color:rgba(0,0,0,0.7)}.file.is-danger .file-cta{background-color:#da0b00;border-color:transparent;color:#fff}.file.is-danger:hover .file-cta,.file.is-danger.is-hovered .file-cta{background-color:#cd0a00;border-color:transparent;color:#fff}.file.is-danger:focus .file-cta,.file.is-danger.is-focused .file-cta{border-color:transparent;box-shadow:0 0 0.5em rgba(218,11,0,0.25);color:#fff}.file.is-danger:active .file-cta,.file.is-danger.is-active .file-cta{background-color:#c10a00;border-color:transparent;color:#fff}.file.is-small,#documenter .docs-sidebar form.docs-search>input.file{font-size:.75rem}.file.is-normal{font-size:1rem}.file.is-medium{font-size:1.25rem}.file.is-medium .file-icon .fa{font-size:21px}.file.is-large{font-size:1.5rem}.file.is-large .file-icon .fa{font-size:28px}.file.has-name .file-cta{border-bottom-right-radius:0;border-top-right-radius:0}.file.has-name .file-name{border-bottom-left-radius:0;border-top-left-radius:0}.file.has-name.is-empty .file-cta{border-radius:4px}.file.has-name.is-empty .file-name{display:none}.file.is-boxed .file-label{flex-direction:column}.file.is-boxed .file-cta{flex-direction:column;height:auto;padding:1em 3em}.file.is-boxed .file-name{border-width:0 1px 1px}.file.is-boxed .file-icon{height:1.5em;width:1.5em}.file.is-boxed .file-icon .fa{font-size:21px}.file.is-boxed.is-small .file-icon .fa,#documenter .docs-sidebar form.docs-search>input.is-boxed .file-icon .fa{font-size:14px}.file.is-boxed.is-medium .file-icon .fa{font-size:28px}.file.is-boxed.is-large .file-icon .fa{font-size:35px}.file.is-boxed.has-name .file-cta{border-radius:4px 4px 0 0}.file.is-boxed.has-name .file-name{border-radius:0 0 4px 4px;border-width:0 1px 1px}.file.is-centered{justify-content:center}.file.is-fullwidth .file-label{width:100%}.file.is-fullwidth .file-name{flex-grow:1;max-width:none}.file.is-right{justify-content:flex-end}.file.is-right .file-cta{border-radius:0 4px 4px 0}.file.is-right .file-name{border-radius:4px 0 0 4px;border-width:1px 0 1px 1px;order:-1}.file-label{align-items:stretch;display:flex;cursor:pointer;justify-content:flex-start;overflow:hidden;position:relative}.file-label:hover .file-cta{background-color:#eee;color:#222}.file-label:hover .file-name{border-color:#d5d5d5}.file-label:active .file-cta{background-color:#e8e8e8;color:#222}.file-label:active .file-name{border-color:#cfcfcf}.file-input{height:100%;left:0;opacity:0;outline:none;position:absolute;top:0;width:100%}.file-cta,.file-name{border-color:#dbdbdb;border-radius:4px;font-size:1em;padding-left:1em;padding-right:1em;white-space:nowrap}.file-cta{background-color:#f5f5f5;color:#222}.file-name{border-color:#dbdbdb;border-style:solid;border-width:1px 1px 1px 0;display:block;max-width:16em;overflow:hidden;text-align:inherit;text-overflow:ellipsis}.file-icon{align-items:center;display:flex;height:1em;justify-content:center;margin-right:.5em;width:1em}.file-icon 
.fa{font-size:14px}.label{color:#222;display:block;font-size:1rem;font-weight:700}.label:not(:last-child){margin-bottom:0.5em}.label.is-small,#documenter .docs-sidebar form.docs-search>input.label{font-size:.75rem}.label.is-medium{font-size:1.25rem}.label.is-large{font-size:1.5rem}.help{display:block;font-size:.75rem;margin-top:0.25rem}.help.is-white{color:#fff}.help.is-black{color:#0a0a0a}.help.is-light{color:#f5f5f5}.help.is-dark,.content kbd.help{color:#363636}.help.is-primary,.docstring>section>a.help.docs-sourcelink{color:#4eb5de}.help.is-link{color:#2e63b8}.help.is-info{color:#209cee}.help.is-success{color:#22c35b}.help.is-warning{color:#ffdd57}.help.is-danger{color:#da0b00}.field:not(:last-child){margin-bottom:0.75rem}.field.has-addons{display:flex;justify-content:flex-start}.field.has-addons .control:not(:last-child){margin-right:-1px}.field.has-addons .control:not(:first-child):not(:last-child) .button,.field.has-addons .control:not(:first-child):not(:last-child) .input,.field.has-addons .control:not(:first-child):not(:last-child) #documenter .docs-sidebar form.docs-search>input,#documenter .docs-sidebar .field.has-addons .control:not(:first-child):not(:last-child) form.docs-search>input,.field.has-addons .control:not(:first-child):not(:last-child) .select select{border-radius:0}.field.has-addons .control:first-child:not(:only-child) .button,.field.has-addons .control:first-child:not(:only-child) .input,.field.has-addons .control:first-child:not(:only-child) #documenter .docs-sidebar form.docs-search>input,#documenter .docs-sidebar .field.has-addons .control:first-child:not(:only-child) form.docs-search>input,.field.has-addons .control:first-child:not(:only-child) .select select{border-bottom-right-radius:0;border-top-right-radius:0}.field.has-addons .control:last-child:not(:only-child) .button,.field.has-addons .control:last-child:not(:only-child) .input,.field.has-addons .control:last-child:not(:only-child) #documenter .docs-sidebar form.docs-search>input,#documenter .docs-sidebar .field.has-addons .control:last-child:not(:only-child) form.docs-search>input,.field.has-addons .control:last-child:not(:only-child) .select select{border-bottom-left-radius:0;border-top-left-radius:0}.field.has-addons .control .button:not([disabled]):hover,.field.has-addons .control .button.is-hovered:not([disabled]),.field.has-addons .control .input:not([disabled]):hover,.field.has-addons .control #documenter .docs-sidebar form.docs-search>input:not([disabled]):hover,#documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):hover,.field.has-addons .control .input.is-hovered:not([disabled]),.field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-hovered:not([disabled]),#documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-hovered:not([disabled]),.field.has-addons .control .select select:not([disabled]):hover,.field.has-addons .control .select select.is-hovered:not([disabled]){z-index:2}.field.has-addons .control .button:not([disabled]):focus,.field.has-addons .control .button.is-focused:not([disabled]),.field.has-addons .control .button:not([disabled]):active,.field.has-addons .control .button.is-active:not([disabled]),.field.has-addons .control .input:not([disabled]):focus,.field.has-addons .control #documenter .docs-sidebar form.docs-search>input:not([disabled]):focus,#documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):focus,.field.has-addons .control 
.input.is-focused:not([disabled]),.field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-focused:not([disabled]),#documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-focused:not([disabled]),.field.has-addons .control .input:not([disabled]):active,.field.has-addons .control #documenter .docs-sidebar form.docs-search>input:not([disabled]):active,#documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):active,.field.has-addons .control .input.is-active:not([disabled]),.field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-active:not([disabled]),#documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-active:not([disabled]),.field.has-addons .control .select select:not([disabled]):focus,.field.has-addons .control .select select.is-focused:not([disabled]),.field.has-addons .control .select select:not([disabled]):active,.field.has-addons .control .select select.is-active:not([disabled]){z-index:3}.field.has-addons .control .button:not([disabled]):focus:hover,.field.has-addons .control .button.is-focused:not([disabled]):hover,.field.has-addons .control .button:not([disabled]):active:hover,.field.has-addons .control .button.is-active:not([disabled]):hover,.field.has-addons .control .input:not([disabled]):focus:hover,.field.has-addons .control #documenter .docs-sidebar form.docs-search>input:not([disabled]):focus:hover,#documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):focus:hover,.field.has-addons .control .input.is-focused:not([disabled]):hover,.field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-focused:not([disabled]):hover,#documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-focused:not([disabled]):hover,.field.has-addons .control .input:not([disabled]):active:hover,.field.has-addons .control #documenter .docs-sidebar form.docs-search>input:not([disabled]):active:hover,#documenter .docs-sidebar .field.has-addons .control form.docs-search>input:not([disabled]):active:hover,.field.has-addons .control .input.is-active:not([disabled]):hover,.field.has-addons .control #documenter .docs-sidebar form.docs-search>input.is-active:not([disabled]):hover,#documenter .docs-sidebar .field.has-addons .control form.docs-search>input.is-active:not([disabled]):hover,.field.has-addons .control .select select:not([disabled]):focus:hover,.field.has-addons .control .select select.is-focused:not([disabled]):hover,.field.has-addons .control .select select:not([disabled]):active:hover,.field.has-addons .control .select select.is-active:not([disabled]):hover{z-index:4}.field.has-addons .control.is-expanded{flex-grow:1;flex-shrink:1}.field.has-addons.has-addons-centered{justify-content:center}.field.has-addons.has-addons-right{justify-content:flex-end}.field.has-addons.has-addons-fullwidth 
.control{flex-grow:1;flex-shrink:0}.field.is-grouped{display:flex;justify-content:flex-start}.field.is-grouped>.control{flex-shrink:0}.field.is-grouped>.control:not(:last-child){margin-bottom:0;margin-right:.75rem}.field.is-grouped>.control.is-expanded{flex-grow:1;flex-shrink:1}.field.is-grouped.is-grouped-centered{justify-content:center}.field.is-grouped.is-grouped-right{justify-content:flex-end}.field.is-grouped.is-grouped-multiline{flex-wrap:wrap}.field.is-grouped.is-grouped-multiline>.control:last-child,.field.is-grouped.is-grouped-multiline>.control:not(:last-child){margin-bottom:0.75rem}.field.is-grouped.is-grouped-multiline:last-child{margin-bottom:-0.75rem}.field.is-grouped.is-grouped-multiline:not(:last-child){margin-bottom:0}@media screen and (min-width: 769px),print{.field.is-horizontal{display:flex}}.field-label .label{font-size:inherit}@media screen and (max-width: 768px){.field-label{margin-bottom:0.5rem}}@media screen and (min-width: 769px),print{.field-label{flex-basis:0;flex-grow:1;flex-shrink:0;margin-right:1.5rem;text-align:right}.field-label.is-small,#documenter .docs-sidebar form.docs-search>input.field-label{font-size:.75rem;padding-top:0.375em}.field-label.is-normal{padding-top:0.375em}.field-label.is-medium{font-size:1.25rem;padding-top:0.375em}.field-label.is-large{font-size:1.5rem;padding-top:0.375em}}.field-body .field .field{margin-bottom:0}@media screen and (min-width: 769px),print{.field-body{display:flex;flex-basis:0;flex-grow:5;flex-shrink:1}.field-body .field{margin-bottom:0}.field-body>.field{flex-shrink:1}.field-body>.field:not(.is-narrow){flex-grow:1}.field-body>.field:not(:last-child){margin-right:.75rem}}.control{box-sizing:border-box;clear:both;font-size:1rem;position:relative;text-align:inherit}.control.has-icons-left .input:focus~.icon,.control.has-icons-left #documenter .docs-sidebar form.docs-search>input:focus~.icon,#documenter .docs-sidebar .control.has-icons-left form.docs-search>input:focus~.icon,.control.has-icons-left .select:focus~.icon,.control.has-icons-right .input:focus~.icon,.control.has-icons-right #documenter .docs-sidebar form.docs-search>input:focus~.icon,#documenter .docs-sidebar .control.has-icons-right form.docs-search>input:focus~.icon,.control.has-icons-right .select:focus~.icon{color:#222}.control.has-icons-left .input.is-small~.icon,.control.has-icons-left #documenter .docs-sidebar form.docs-search>input~.icon,#documenter .docs-sidebar .control.has-icons-left form.docs-search>input~.icon,.control.has-icons-left .select.is-small~.icon,.control.has-icons-right .input.is-small~.icon,.control.has-icons-right #documenter .docs-sidebar form.docs-search>input~.icon,#documenter .docs-sidebar .control.has-icons-right form.docs-search>input~.icon,.control.has-icons-right .select.is-small~.icon{font-size:.75rem}.control.has-icons-left .input.is-medium~.icon,.control.has-icons-left #documenter .docs-sidebar form.docs-search>input.is-medium~.icon,#documenter .docs-sidebar .control.has-icons-left form.docs-search>input.is-medium~.icon,.control.has-icons-left .select.is-medium~.icon,.control.has-icons-right .input.is-medium~.icon,.control.has-icons-right #documenter .docs-sidebar form.docs-search>input.is-medium~.icon,#documenter .docs-sidebar .control.has-icons-right form.docs-search>input.is-medium~.icon,.control.has-icons-right .select.is-medium~.icon{font-size:1.25rem}.control.has-icons-left .input.is-large~.icon,.control.has-icons-left #documenter .docs-sidebar form.docs-search>input.is-large~.icon,#documenter .docs-sidebar 
.control.has-icons-left form.docs-search>input.is-large~.icon,.control.has-icons-left .select.is-large~.icon,.control.has-icons-right .input.is-large~.icon,.control.has-icons-right #documenter .docs-sidebar form.docs-search>input.is-large~.icon,#documenter .docs-sidebar .control.has-icons-right form.docs-search>input.is-large~.icon,.control.has-icons-right .select.is-large~.icon{font-size:1.5rem}.control.has-icons-left .icon,.control.has-icons-right .icon{color:#dbdbdb;height:2.5em;pointer-events:none;position:absolute;top:0;width:2.5em;z-index:4}.control.has-icons-left .input,.control.has-icons-left #documenter .docs-sidebar form.docs-search>input,#documenter .docs-sidebar .control.has-icons-left form.docs-search>input,.control.has-icons-left .select select{padding-left:2.5em}.control.has-icons-left .icon.is-left{left:0}.control.has-icons-right .input,.control.has-icons-right #documenter .docs-sidebar form.docs-search>input,#documenter .docs-sidebar .control.has-icons-right form.docs-search>input,.control.has-icons-right .select select{padding-right:2.5em}.control.has-icons-right .icon.is-right{right:0}.control.is-loading::after{position:absolute !important;right:.625em;top:0.625em;z-index:4}.control.is-loading.is-small:after,#documenter .docs-sidebar form.docs-search>input.is-loading:after{font-size:.75rem}.control.is-loading.is-medium:after{font-size:1.25rem}.control.is-loading.is-large:after{font-size:1.5rem}.breadcrumb{font-size:1rem;white-space:nowrap}.breadcrumb a{align-items:center;color:#2e63b8;display:flex;justify-content:center;padding:0 .75em}.breadcrumb a:hover{color:#363636}.breadcrumb li{align-items:center;display:flex}.breadcrumb li:first-child a{padding-left:0}.breadcrumb li.is-active a{color:#222;cursor:default;pointer-events:none}.breadcrumb li+li::before{color:#b5b5b5;content:"\0002f"}.breadcrumb ul,.breadcrumb ol{align-items:flex-start;display:flex;flex-wrap:wrap;justify-content:flex-start}.breadcrumb .icon:first-child{margin-right:.5em}.breadcrumb .icon:last-child{margin-left:.5em}.breadcrumb.is-centered ol,.breadcrumb.is-centered ul{justify-content:center}.breadcrumb.is-right ol,.breadcrumb.is-right ul{justify-content:flex-end}.breadcrumb.is-small,#documenter .docs-sidebar form.docs-search>input.breadcrumb{font-size:.75rem}.breadcrumb.is-medium{font-size:1.25rem}.breadcrumb.is-large{font-size:1.5rem}.breadcrumb.has-arrow-separator li+li::before{content:"\02192"}.breadcrumb.has-bullet-separator li+li::before{content:"\02022"}.breadcrumb.has-dot-separator li+li::before{content:"\000b7"}.breadcrumb.has-succeeds-separator li+li::before{content:"\0227B"}.card{background-color:#fff;border-radius:.25rem;box-shadow:#bbb;color:#222;max-width:100%;position:relative}.card-footer:first-child,.card-content:first-child,.card-header:first-child{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.card-footer:last-child,.card-content:last-child,.card-header:last-child{border-bottom-left-radius:.25rem;border-bottom-right-radius:.25rem}.card-header{background-color:rgba(0,0,0,0);align-items:stretch;box-shadow:0 0.125em 0.25em rgba(10,10,10,0.1);display:flex}.card-header-title{align-items:center;color:#222;display:flex;flex-grow:1;font-weight:700;padding:0.75rem 
1rem}.card-header-title.is-centered{justify-content:center}.card-header-icon{-moz-appearance:none;-webkit-appearance:none;appearance:none;background:none;border:none;color:currentColor;font-family:inherit;font-size:1em;margin:0;padding:0;align-items:center;cursor:pointer;display:flex;justify-content:center;padding:0.75rem 1rem}.card-image{display:block;position:relative}.card-image:first-child img{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.card-image:last-child img{border-bottom-left-radius:.25rem;border-bottom-right-radius:.25rem}.card-content{background-color:rgba(0,0,0,0);padding:1.5rem}.card-footer{background-color:rgba(0,0,0,0);border-top:1px solid #ededed;align-items:stretch;display:flex}.card-footer-item{align-items:center;display:flex;flex-basis:0;flex-grow:1;flex-shrink:0;justify-content:center;padding:.75rem}.card-footer-item:not(:last-child){border-right:1px solid #ededed}.card .media:not(:last-child){margin-bottom:1.5rem}.dropdown{display:inline-flex;position:relative;vertical-align:top}.dropdown.is-active .dropdown-menu,.dropdown.is-hoverable:hover .dropdown-menu{display:block}.dropdown.is-right .dropdown-menu{left:auto;right:0}.dropdown.is-up .dropdown-menu{bottom:100%;padding-bottom:4px;padding-top:initial;top:auto}.dropdown-menu{display:none;left:0;min-width:12rem;padding-top:4px;position:absolute;top:100%;z-index:20}.dropdown-content{background-color:#fff;border-radius:4px;box-shadow:#bbb;padding-bottom:.5rem;padding-top:.5rem}.dropdown-item{color:#222;display:block;font-size:0.875rem;line-height:1.5;padding:0.375rem 1rem;position:relative}a.dropdown-item,button.dropdown-item{padding-right:3rem;text-align:inherit;white-space:nowrap;width:100%}a.dropdown-item:hover,button.dropdown-item:hover{background-color:#f5f5f5;color:#0a0a0a}a.dropdown-item.is-active,button.dropdown-item.is-active{background-color:#2e63b8;color:#fff}.dropdown-divider{background-color:#ededed;border:none;display:block;height:1px;margin:0.5rem 0}.level{align-items:center;justify-content:space-between}.level code{border-radius:4px}.level img{display:inline-block;vertical-align:top}.level.is-mobile{display:flex}.level.is-mobile .level-left,.level.is-mobile .level-right{display:flex}.level.is-mobile .level-left+.level-right{margin-top:0}.level.is-mobile .level-item:not(:last-child){margin-bottom:0;margin-right:.75rem}.level.is-mobile .level-item:not(.is-narrow){flex-grow:1}@media screen and (min-width: 769px),print{.level{display:flex}.level>.level-item:not(.is-narrow){flex-grow:1}}.level-item{align-items:center;display:flex;flex-basis:auto;flex-grow:0;flex-shrink:0;justify-content:center}.level-item .title,.level-item .subtitle{margin-bottom:0}@media screen and (max-width: 768px){.level-item:not(:last-child){margin-bottom:.75rem}}.level-left,.level-right{flex-basis:auto;flex-grow:0;flex-shrink:0}.level-left .level-item.is-flexible,.level-right .level-item.is-flexible{flex-grow:1}@media screen and (min-width: 769px),print{.level-left .level-item:not(:last-child),.level-right .level-item:not(:last-child){margin-right:.75rem}}.level-left{align-items:center;justify-content:flex-start}@media screen and (max-width: 768px){.level-left+.level-right{margin-top:1.5rem}}@media screen and (min-width: 769px),print{.level-left{display:flex}}.level-right{align-items:center;justify-content:flex-end}@media screen and (min-width: 769px),print{.level-right{display:flex}}.media{align-items:flex-start;display:flex;text-align:inherit}.media .content:not(:last-child){margin-bottom:.75rem}.media 
.media{border-top:1px solid rgba(219,219,219,0.5);display:flex;padding-top:.75rem}.media .media .content:not(:last-child),.media .media .control:not(:last-child){margin-bottom:.5rem}.media .media .media{padding-top:.5rem}.media .media .media+.media{margin-top:.5rem}.media+.media{border-top:1px solid rgba(219,219,219,0.5);margin-top:1rem;padding-top:1rem}.media.is-large+.media{margin-top:1.5rem;padding-top:1.5rem}.media-left,.media-right{flex-basis:auto;flex-grow:0;flex-shrink:0}.media-left{margin-right:1rem}.media-right{margin-left:1rem}.media-content{flex-basis:auto;flex-grow:1;flex-shrink:1;text-align:inherit}@media screen and (max-width: 768px){.media-content{overflow-x:auto}}.menu{font-size:1rem}.menu.is-small,#documenter .docs-sidebar form.docs-search>input.menu{font-size:.75rem}.menu.is-medium{font-size:1.25rem}.menu.is-large{font-size:1.5rem}.menu-list{line-height:1.25}.menu-list a{border-radius:2px;color:#222;display:block;padding:0.5em 0.75em}.menu-list a:hover{background-color:#f5f5f5;color:#222}.menu-list a.is-active{background-color:#2e63b8;color:#fff}.menu-list li ul{border-left:1px solid #dbdbdb;margin:.75em;padding-left:.75em}.menu-label{color:#6b6b6b;font-size:.75em;letter-spacing:.1em;text-transform:uppercase}.menu-label:not(:first-child){margin-top:1em}.menu-label:not(:last-child){margin-bottom:1em}.message{background-color:#f5f5f5;border-radius:4px;font-size:1rem}.message strong{color:currentColor}.message a:not(.button):not(.tag):not(.dropdown-item){color:currentColor;text-decoration:underline}.message.is-small,#documenter .docs-sidebar form.docs-search>input.message{font-size:.75rem}.message.is-medium{font-size:1.25rem}.message.is-large{font-size:1.5rem}.message.is-white{background-color:#fff}.message.is-white .message-header{background-color:#fff;color:#0a0a0a}.message.is-white .message-body{border-color:#fff}.message.is-black{background-color:#fafafa}.message.is-black .message-header{background-color:#0a0a0a;color:#fff}.message.is-black .message-body{border-color:#0a0a0a}.message.is-light{background-color:#fafafa}.message.is-light .message-header{background-color:#f5f5f5;color:rgba(0,0,0,0.7)}.message.is-light .message-body{border-color:#f5f5f5}.message.is-dark,.content kbd.message{background-color:#fafafa}.message.is-dark .message-header,.content kbd.message .message-header{background-color:#363636;color:#fff}.message.is-dark .message-body,.content kbd.message .message-body{border-color:#363636}.message.is-primary,.docstring>section>a.message.docs-sourcelink{background-color:#eef8fc}.message.is-primary .message-header,.docstring>section>a.message.docs-sourcelink .message-header{background-color:#4eb5de;color:#fff}.message.is-primary .message-body,.docstring>section>a.message.docs-sourcelink .message-body{border-color:#4eb5de;color:#1a6d8e}.message.is-link{background-color:#eff3fb}.message.is-link .message-header{background-color:#2e63b8;color:#fff}.message.is-link .message-body{border-color:#2e63b8;color:#3169c4}.message.is-info{background-color:#ecf7fe}.message.is-info .message-header{background-color:#209cee;color:#fff}.message.is-info .message-body{border-color:#209cee;color:#0e72b4}.message.is-success{background-color:#eefcf3}.message.is-success .message-header{background-color:#22c35b;color:#fff}.message.is-success .message-body{border-color:#22c35b;color:#198f43}.message.is-warning{background-color:#fffbeb}.message.is-warning .message-header{background-color:#ffdd57;color:rgba(0,0,0,0.7)}.message.is-warning 
.message-body{border-color:#ffdd57;color:#947600}.message.is-danger{background-color:#ffeceb}.message.is-danger .message-header{background-color:#da0b00;color:#fff}.message.is-danger .message-body{border-color:#da0b00;color:#f50c00}.message-header{align-items:center;background-color:#222;border-radius:4px 4px 0 0;color:#fff;display:flex;font-weight:700;justify-content:space-between;line-height:1.25;padding:0.75em 1em;position:relative}.message-header .delete{flex-grow:0;flex-shrink:0;margin-left:.75em}.message-header+.message-body{border-width:0;border-top-left-radius:0;border-top-right-radius:0}.message-body{border-color:#dbdbdb;border-radius:4px;border-style:solid;border-width:0 0 0 4px;color:#222;padding:1.25em 1.5em}.message-body code,.message-body pre{background-color:#fff}.message-body pre code{background-color:rgba(0,0,0,0)}.modal{align-items:center;display:none;flex-direction:column;justify-content:center;overflow:hidden;position:fixed;z-index:40}.modal.is-active{display:flex}.modal-background{background-color:rgba(10,10,10,0.86)}.modal-content,.modal-card{margin:0 20px;max-height:calc(100vh - 160px);overflow:auto;position:relative;width:100%}@media screen and (min-width: 769px){.modal-content,.modal-card{margin:0 auto;max-height:calc(100vh - 40px);width:640px}}.modal-close{background:none;height:40px;position:fixed;right:20px;top:20px;width:40px}.modal-card{display:flex;flex-direction:column;max-height:calc(100vh - 40px);overflow:hidden;-ms-overflow-y:visible}.modal-card-head,.modal-card-foot{align-items:center;background-color:#f5f5f5;display:flex;flex-shrink:0;justify-content:flex-start;padding:20px;position:relative}.modal-card-head{border-bottom:1px solid #dbdbdb;border-top-left-radius:6px;border-top-right-radius:6px}.modal-card-title{color:#222;flex-grow:1;flex-shrink:0;font-size:1.5rem;line-height:1}.modal-card-foot{border-bottom-left-radius:6px;border-bottom-right-radius:6px;border-top:1px solid #dbdbdb}.modal-card-foot .button:not(:last-child){margin-right:.5em}.modal-card-body{-webkit-overflow-scrolling:touch;background-color:#fff;flex-grow:1;flex-shrink:1;overflow:auto;padding:20px}.navbar{background-color:#fff;min-height:3.25rem;position:relative;z-index:30}.navbar.is-white{background-color:#fff;color:#0a0a0a}.navbar.is-white .navbar-brand>.navbar-item,.navbar.is-white .navbar-brand .navbar-link{color:#0a0a0a}.navbar.is-white .navbar-brand>a.navbar-item:focus,.navbar.is-white .navbar-brand>a.navbar-item:hover,.navbar.is-white .navbar-brand>a.navbar-item.is-active,.navbar.is-white .navbar-brand .navbar-link:focus,.navbar.is-white .navbar-brand .navbar-link:hover,.navbar.is-white .navbar-brand .navbar-link.is-active{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-brand .navbar-link::after{border-color:#0a0a0a}.navbar.is-white .navbar-burger{color:#0a0a0a}@media screen and (min-width: 1056px){.navbar.is-white .navbar-start>.navbar-item,.navbar.is-white .navbar-start .navbar-link,.navbar.is-white .navbar-end>.navbar-item,.navbar.is-white .navbar-end .navbar-link{color:#0a0a0a}.navbar.is-white .navbar-start>a.navbar-item:focus,.navbar.is-white .navbar-start>a.navbar-item:hover,.navbar.is-white .navbar-start>a.navbar-item.is-active,.navbar.is-white .navbar-start .navbar-link:focus,.navbar.is-white .navbar-start .navbar-link:hover,.navbar.is-white .navbar-start .navbar-link.is-active,.navbar.is-white .navbar-end>a.navbar-item:focus,.navbar.is-white .navbar-end>a.navbar-item:hover,.navbar.is-white .navbar-end>a.navbar-item.is-active,.navbar.is-white .navbar-end 
.navbar-link:focus,.navbar.is-white .navbar-end .navbar-link:hover,.navbar.is-white .navbar-end .navbar-link.is-active{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-start .navbar-link::after,.navbar.is-white .navbar-end .navbar-link::after{border-color:#0a0a0a}.navbar.is-white .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-white .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-white .navbar-item.has-dropdown.is-active .navbar-link{background-color:#f2f2f2;color:#0a0a0a}.navbar.is-white .navbar-dropdown a.navbar-item.is-active{background-color:#fff;color:#0a0a0a}}.navbar.is-black{background-color:#0a0a0a;color:#fff}.navbar.is-black .navbar-brand>.navbar-item,.navbar.is-black .navbar-brand .navbar-link{color:#fff}.navbar.is-black .navbar-brand>a.navbar-item:focus,.navbar.is-black .navbar-brand>a.navbar-item:hover,.navbar.is-black .navbar-brand>a.navbar-item.is-active,.navbar.is-black .navbar-brand .navbar-link:focus,.navbar.is-black .navbar-brand .navbar-link:hover,.navbar.is-black .navbar-brand .navbar-link.is-active{background-color:#000;color:#fff}.navbar.is-black .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-black .navbar-burger{color:#fff}@media screen and (min-width: 1056px){.navbar.is-black .navbar-start>.navbar-item,.navbar.is-black .navbar-start .navbar-link,.navbar.is-black .navbar-end>.navbar-item,.navbar.is-black .navbar-end .navbar-link{color:#fff}.navbar.is-black .navbar-start>a.navbar-item:focus,.navbar.is-black .navbar-start>a.navbar-item:hover,.navbar.is-black .navbar-start>a.navbar-item.is-active,.navbar.is-black .navbar-start .navbar-link:focus,.navbar.is-black .navbar-start .navbar-link:hover,.navbar.is-black .navbar-start .navbar-link.is-active,.navbar.is-black .navbar-end>a.navbar-item:focus,.navbar.is-black .navbar-end>a.navbar-item:hover,.navbar.is-black .navbar-end>a.navbar-item.is-active,.navbar.is-black .navbar-end .navbar-link:focus,.navbar.is-black .navbar-end .navbar-link:hover,.navbar.is-black .navbar-end .navbar-link.is-active{background-color:#000;color:#fff}.navbar.is-black .navbar-start .navbar-link::after,.navbar.is-black .navbar-end .navbar-link::after{border-color:#fff}.navbar.is-black .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-black .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-black .navbar-item.has-dropdown.is-active .navbar-link{background-color:#000;color:#fff}.navbar.is-black .navbar-dropdown a.navbar-item.is-active{background-color:#0a0a0a;color:#fff}}.navbar.is-light{background-color:#f5f5f5;color:rgba(0,0,0,0.7)}.navbar.is-light .navbar-brand>.navbar-item,.navbar.is-light .navbar-brand .navbar-link{color:rgba(0,0,0,0.7)}.navbar.is-light .navbar-brand>a.navbar-item:focus,.navbar.is-light .navbar-brand>a.navbar-item:hover,.navbar.is-light .navbar-brand>a.navbar-item.is-active,.navbar.is-light .navbar-brand .navbar-link:focus,.navbar.is-light .navbar-brand .navbar-link:hover,.navbar.is-light .navbar-brand .navbar-link.is-active{background-color:#e8e8e8;color:rgba(0,0,0,0.7)}.navbar.is-light .navbar-brand .navbar-link::after{border-color:rgba(0,0,0,0.7)}.navbar.is-light .navbar-burger{color:rgba(0,0,0,0.7)}@media screen and (min-width: 1056px){.navbar.is-light .navbar-start>.navbar-item,.navbar.is-light .navbar-start .navbar-link,.navbar.is-light .navbar-end>.navbar-item,.navbar.is-light .navbar-end .navbar-link{color:rgba(0,0,0,0.7)}.navbar.is-light .navbar-start>a.navbar-item:focus,.navbar.is-light .navbar-start>a.navbar-item:hover,.navbar.is-light 
.navbar-start>a.navbar-item.is-active,.navbar.is-light .navbar-start .navbar-link:focus,.navbar.is-light .navbar-start .navbar-link:hover,.navbar.is-light .navbar-start .navbar-link.is-active,.navbar.is-light .navbar-end>a.navbar-item:focus,.navbar.is-light .navbar-end>a.navbar-item:hover,.navbar.is-light .navbar-end>a.navbar-item.is-active,.navbar.is-light .navbar-end .navbar-link:focus,.navbar.is-light .navbar-end .navbar-link:hover,.navbar.is-light .navbar-end .navbar-link.is-active{background-color:#e8e8e8;color:rgba(0,0,0,0.7)}.navbar.is-light .navbar-start .navbar-link::after,.navbar.is-light .navbar-end .navbar-link::after{border-color:rgba(0,0,0,0.7)}.navbar.is-light .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-light .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-light .navbar-item.has-dropdown.is-active .navbar-link{background-color:#e8e8e8;color:rgba(0,0,0,0.7)}.navbar.is-light .navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:rgba(0,0,0,0.7)}}.navbar.is-dark,.content kbd.navbar{background-color:#363636;color:#fff}.navbar.is-dark .navbar-brand>.navbar-item,.content kbd.navbar .navbar-brand>.navbar-item,.navbar.is-dark .navbar-brand .navbar-link,.content kbd.navbar .navbar-brand .navbar-link{color:#fff}.navbar.is-dark .navbar-brand>a.navbar-item:focus,.content kbd.navbar .navbar-brand>a.navbar-item:focus,.navbar.is-dark .navbar-brand>a.navbar-item:hover,.content kbd.navbar .navbar-brand>a.navbar-item:hover,.navbar.is-dark .navbar-brand>a.navbar-item.is-active,.content kbd.navbar .navbar-brand>a.navbar-item.is-active,.navbar.is-dark .navbar-brand .navbar-link:focus,.content kbd.navbar .navbar-brand .navbar-link:focus,.navbar.is-dark .navbar-brand .navbar-link:hover,.content kbd.navbar .navbar-brand .navbar-link:hover,.navbar.is-dark .navbar-brand .navbar-link.is-active,.content kbd.navbar .navbar-brand .navbar-link.is-active{background-color:#292929;color:#fff}.navbar.is-dark .navbar-brand .navbar-link::after,.content kbd.navbar .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-dark .navbar-burger,.content kbd.navbar .navbar-burger{color:#fff}@media screen and (min-width: 1056px){.navbar.is-dark .navbar-start>.navbar-item,.content kbd.navbar .navbar-start>.navbar-item,.navbar.is-dark .navbar-start .navbar-link,.content kbd.navbar .navbar-start .navbar-link,.navbar.is-dark .navbar-end>.navbar-item,.content kbd.navbar .navbar-end>.navbar-item,.navbar.is-dark .navbar-end .navbar-link,.content kbd.navbar .navbar-end .navbar-link{color:#fff}.navbar.is-dark .navbar-start>a.navbar-item:focus,.content kbd.navbar .navbar-start>a.navbar-item:focus,.navbar.is-dark .navbar-start>a.navbar-item:hover,.content kbd.navbar .navbar-start>a.navbar-item:hover,.navbar.is-dark .navbar-start>a.navbar-item.is-active,.content kbd.navbar .navbar-start>a.navbar-item.is-active,.navbar.is-dark .navbar-start .navbar-link:focus,.content kbd.navbar .navbar-start .navbar-link:focus,.navbar.is-dark .navbar-start .navbar-link:hover,.content kbd.navbar .navbar-start .navbar-link:hover,.navbar.is-dark .navbar-start .navbar-link.is-active,.content kbd.navbar .navbar-start .navbar-link.is-active,.navbar.is-dark .navbar-end>a.navbar-item:focus,.content kbd.navbar .navbar-end>a.navbar-item:focus,.navbar.is-dark .navbar-end>a.navbar-item:hover,.content kbd.navbar .navbar-end>a.navbar-item:hover,.navbar.is-dark .navbar-end>a.navbar-item.is-active,.content kbd.navbar .navbar-end>a.navbar-item.is-active,.navbar.is-dark .navbar-end .navbar-link:focus,.content kbd.navbar 
.navbar-end .navbar-link:focus,.navbar.is-dark .navbar-end .navbar-link:hover,.content kbd.navbar .navbar-end .navbar-link:hover,.navbar.is-dark .navbar-end .navbar-link.is-active,.content kbd.navbar .navbar-end .navbar-link.is-active{background-color:#292929;color:#fff}.navbar.is-dark .navbar-start .navbar-link::after,.content kbd.navbar .navbar-start .navbar-link::after,.navbar.is-dark .navbar-end .navbar-link::after,.content kbd.navbar .navbar-end .navbar-link::after{border-color:#fff}.navbar.is-dark .navbar-item.has-dropdown:focus .navbar-link,.content kbd.navbar .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-dark .navbar-item.has-dropdown:hover .navbar-link,.content kbd.navbar .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-dark .navbar-item.has-dropdown.is-active .navbar-link,.content kbd.navbar .navbar-item.has-dropdown.is-active .navbar-link{background-color:#292929;color:#fff}.navbar.is-dark .navbar-dropdown a.navbar-item.is-active,.content kbd.navbar .navbar-dropdown a.navbar-item.is-active{background-color:#363636;color:#fff}}.navbar.is-primary,.docstring>section>a.navbar.docs-sourcelink{background-color:#4eb5de;color:#fff}.navbar.is-primary .navbar-brand>.navbar-item,.docstring>section>a.navbar.docs-sourcelink .navbar-brand>.navbar-item,.navbar.is-primary .navbar-brand .navbar-link,.docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link{color:#fff}.navbar.is-primary .navbar-brand>a.navbar-item:focus,.docstring>section>a.navbar.docs-sourcelink .navbar-brand>a.navbar-item:focus,.navbar.is-primary .navbar-brand>a.navbar-item:hover,.docstring>section>a.navbar.docs-sourcelink .navbar-brand>a.navbar-item:hover,.navbar.is-primary .navbar-brand>a.navbar-item.is-active,.docstring>section>a.navbar.docs-sourcelink .navbar-brand>a.navbar-item.is-active,.navbar.is-primary .navbar-brand .navbar-link:focus,.docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link:focus,.navbar.is-primary .navbar-brand .navbar-link:hover,.docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link:hover,.navbar.is-primary .navbar-brand .navbar-link.is-active,.docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link.is-active{background-color:#39acda;color:#fff}.navbar.is-primary .navbar-brand .navbar-link::after,.docstring>section>a.navbar.docs-sourcelink .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-primary .navbar-burger,.docstring>section>a.navbar.docs-sourcelink .navbar-burger{color:#fff}@media screen and (min-width: 1056px){.navbar.is-primary .navbar-start>.navbar-item,.docstring>section>a.navbar.docs-sourcelink .navbar-start>.navbar-item,.navbar.is-primary .navbar-start .navbar-link,.docstring>section>a.navbar.docs-sourcelink .navbar-start .navbar-link,.navbar.is-primary .navbar-end>.navbar-item,.docstring>section>a.navbar.docs-sourcelink .navbar-end>.navbar-item,.navbar.is-primary .navbar-end .navbar-link,.docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link{color:#fff}.navbar.is-primary .navbar-start>a.navbar-item:focus,.docstring>section>a.navbar.docs-sourcelink .navbar-start>a.navbar-item:focus,.navbar.is-primary .navbar-start>a.navbar-item:hover,.docstring>section>a.navbar.docs-sourcelink .navbar-start>a.navbar-item:hover,.navbar.is-primary .navbar-start>a.navbar-item.is-active,.docstring>section>a.navbar.docs-sourcelink .navbar-start>a.navbar-item.is-active,.navbar.is-primary .navbar-start .navbar-link:focus,.docstring>section>a.navbar.docs-sourcelink .navbar-start 
.navbar-link:focus,.navbar.is-primary .navbar-start .navbar-link:hover,.docstring>section>a.navbar.docs-sourcelink .navbar-start .navbar-link:hover,.navbar.is-primary .navbar-start .navbar-link.is-active,.docstring>section>a.navbar.docs-sourcelink .navbar-start .navbar-link.is-active,.navbar.is-primary .navbar-end>a.navbar-item:focus,.docstring>section>a.navbar.docs-sourcelink .navbar-end>a.navbar-item:focus,.navbar.is-primary .navbar-end>a.navbar-item:hover,.docstring>section>a.navbar.docs-sourcelink .navbar-end>a.navbar-item:hover,.navbar.is-primary .navbar-end>a.navbar-item.is-active,.docstring>section>a.navbar.docs-sourcelink .navbar-end>a.navbar-item.is-active,.navbar.is-primary .navbar-end .navbar-link:focus,.docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link:focus,.navbar.is-primary .navbar-end .navbar-link:hover,.docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link:hover,.navbar.is-primary .navbar-end .navbar-link.is-active,.docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link.is-active{background-color:#39acda;color:#fff}.navbar.is-primary .navbar-start .navbar-link::after,.docstring>section>a.navbar.docs-sourcelink .navbar-start .navbar-link::after,.navbar.is-primary .navbar-end .navbar-link::after,.docstring>section>a.navbar.docs-sourcelink .navbar-end .navbar-link::after{border-color:#fff}.navbar.is-primary .navbar-item.has-dropdown:focus .navbar-link,.docstring>section>a.navbar.docs-sourcelink .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-primary .navbar-item.has-dropdown:hover .navbar-link,.docstring>section>a.navbar.docs-sourcelink .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-primary .navbar-item.has-dropdown.is-active .navbar-link,.docstring>section>a.navbar.docs-sourcelink .navbar-item.has-dropdown.is-active .navbar-link{background-color:#39acda;color:#fff}.navbar.is-primary .navbar-dropdown a.navbar-item.is-active,.docstring>section>a.navbar.docs-sourcelink .navbar-dropdown a.navbar-item.is-active{background-color:#4eb5de;color:#fff}}.navbar.is-link{background-color:#2e63b8;color:#fff}.navbar.is-link .navbar-brand>.navbar-item,.navbar.is-link .navbar-brand .navbar-link{color:#fff}.navbar.is-link .navbar-brand>a.navbar-item:focus,.navbar.is-link .navbar-brand>a.navbar-item:hover,.navbar.is-link .navbar-brand>a.navbar-item.is-active,.navbar.is-link .navbar-brand .navbar-link:focus,.navbar.is-link .navbar-brand .navbar-link:hover,.navbar.is-link .navbar-brand .navbar-link.is-active{background-color:#2958a4;color:#fff}.navbar.is-link .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-link .navbar-burger{color:#fff}@media screen and (min-width: 1056px){.navbar.is-link .navbar-start>.navbar-item,.navbar.is-link .navbar-start .navbar-link,.navbar.is-link .navbar-end>.navbar-item,.navbar.is-link .navbar-end .navbar-link{color:#fff}.navbar.is-link .navbar-start>a.navbar-item:focus,.navbar.is-link .navbar-start>a.navbar-item:hover,.navbar.is-link .navbar-start>a.navbar-item.is-active,.navbar.is-link .navbar-start .navbar-link:focus,.navbar.is-link .navbar-start .navbar-link:hover,.navbar.is-link .navbar-start .navbar-link.is-active,.navbar.is-link .navbar-end>a.navbar-item:focus,.navbar.is-link .navbar-end>a.navbar-item:hover,.navbar.is-link .navbar-end>a.navbar-item.is-active,.navbar.is-link .navbar-end .navbar-link:focus,.navbar.is-link .navbar-end .navbar-link:hover,.navbar.is-link .navbar-end .navbar-link.is-active{background-color:#2958a4;color:#fff}.navbar.is-link .navbar-start 
.navbar-link::after,.navbar.is-link .navbar-end .navbar-link::after{border-color:#fff}.navbar.is-link .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-link .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-link .navbar-item.has-dropdown.is-active .navbar-link{background-color:#2958a4;color:#fff}.navbar.is-link .navbar-dropdown a.navbar-item.is-active{background-color:#2e63b8;color:#fff}}.navbar.is-info{background-color:#209cee;color:#fff}.navbar.is-info .navbar-brand>.navbar-item,.navbar.is-info .navbar-brand .navbar-link{color:#fff}.navbar.is-info .navbar-brand>a.navbar-item:focus,.navbar.is-info .navbar-brand>a.navbar-item:hover,.navbar.is-info .navbar-brand>a.navbar-item.is-active,.navbar.is-info .navbar-brand .navbar-link:focus,.navbar.is-info .navbar-brand .navbar-link:hover,.navbar.is-info .navbar-brand .navbar-link.is-active{background-color:#1190e3;color:#fff}.navbar.is-info .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-info .navbar-burger{color:#fff}@media screen and (min-width: 1056px){.navbar.is-info .navbar-start>.navbar-item,.navbar.is-info .navbar-start .navbar-link,.navbar.is-info .navbar-end>.navbar-item,.navbar.is-info .navbar-end .navbar-link{color:#fff}.navbar.is-info .navbar-start>a.navbar-item:focus,.navbar.is-info .navbar-start>a.navbar-item:hover,.navbar.is-info .navbar-start>a.navbar-item.is-active,.navbar.is-info .navbar-start .navbar-link:focus,.navbar.is-info .navbar-start .navbar-link:hover,.navbar.is-info .navbar-start .navbar-link.is-active,.navbar.is-info .navbar-end>a.navbar-item:focus,.navbar.is-info .navbar-end>a.navbar-item:hover,.navbar.is-info .navbar-end>a.navbar-item.is-active,.navbar.is-info .navbar-end .navbar-link:focus,.navbar.is-info .navbar-end .navbar-link:hover,.navbar.is-info .navbar-end .navbar-link.is-active{background-color:#1190e3;color:#fff}.navbar.is-info .navbar-start .navbar-link::after,.navbar.is-info .navbar-end .navbar-link::after{border-color:#fff}.navbar.is-info .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-info .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-info .navbar-item.has-dropdown.is-active .navbar-link{background-color:#1190e3;color:#fff}.navbar.is-info .navbar-dropdown a.navbar-item.is-active{background-color:#209cee;color:#fff}}.navbar.is-success{background-color:#22c35b;color:#fff}.navbar.is-success .navbar-brand>.navbar-item,.navbar.is-success .navbar-brand .navbar-link{color:#fff}.navbar.is-success .navbar-brand>a.navbar-item:focus,.navbar.is-success .navbar-brand>a.navbar-item:hover,.navbar.is-success .navbar-brand>a.navbar-item.is-active,.navbar.is-success .navbar-brand .navbar-link:focus,.navbar.is-success .navbar-brand .navbar-link:hover,.navbar.is-success .navbar-brand .navbar-link.is-active{background-color:#1ead51;color:#fff}.navbar.is-success .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-success .navbar-burger{color:#fff}@media screen and (min-width: 1056px){.navbar.is-success .navbar-start>.navbar-item,.navbar.is-success .navbar-start .navbar-link,.navbar.is-success .navbar-end>.navbar-item,.navbar.is-success .navbar-end .navbar-link{color:#fff}.navbar.is-success .navbar-start>a.navbar-item:focus,.navbar.is-success .navbar-start>a.navbar-item:hover,.navbar.is-success .navbar-start>a.navbar-item.is-active,.navbar.is-success .navbar-start .navbar-link:focus,.navbar.is-success .navbar-start .navbar-link:hover,.navbar.is-success .navbar-start .navbar-link.is-active,.navbar.is-success .navbar-end>a.navbar-item:focus,.navbar.is-success 
.navbar-end>a.navbar-item:hover,.navbar.is-success .navbar-end>a.navbar-item.is-active,.navbar.is-success .navbar-end .navbar-link:focus,.navbar.is-success .navbar-end .navbar-link:hover,.navbar.is-success .navbar-end .navbar-link.is-active{background-color:#1ead51;color:#fff}.navbar.is-success .navbar-start .navbar-link::after,.navbar.is-success .navbar-end .navbar-link::after{border-color:#fff}.navbar.is-success .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-success .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-success .navbar-item.has-dropdown.is-active .navbar-link{background-color:#1ead51;color:#fff}.navbar.is-success .navbar-dropdown a.navbar-item.is-active{background-color:#22c35b;color:#fff}}.navbar.is-warning{background-color:#ffdd57;color:rgba(0,0,0,0.7)}.navbar.is-warning .navbar-brand>.navbar-item,.navbar.is-warning .navbar-brand .navbar-link{color:rgba(0,0,0,0.7)}.navbar.is-warning .navbar-brand>a.navbar-item:focus,.navbar.is-warning .navbar-brand>a.navbar-item:hover,.navbar.is-warning .navbar-brand>a.navbar-item.is-active,.navbar.is-warning .navbar-brand .navbar-link:focus,.navbar.is-warning .navbar-brand .navbar-link:hover,.navbar.is-warning .navbar-brand .navbar-link.is-active{background-color:#ffd83e;color:rgba(0,0,0,0.7)}.navbar.is-warning .navbar-brand .navbar-link::after{border-color:rgba(0,0,0,0.7)}.navbar.is-warning .navbar-burger{color:rgba(0,0,0,0.7)}@media screen and (min-width: 1056px){.navbar.is-warning .navbar-start>.navbar-item,.navbar.is-warning .navbar-start .navbar-link,.navbar.is-warning .navbar-end>.navbar-item,.navbar.is-warning .navbar-end .navbar-link{color:rgba(0,0,0,0.7)}.navbar.is-warning .navbar-start>a.navbar-item:focus,.navbar.is-warning .navbar-start>a.navbar-item:hover,.navbar.is-warning .navbar-start>a.navbar-item.is-active,.navbar.is-warning .navbar-start .navbar-link:focus,.navbar.is-warning .navbar-start .navbar-link:hover,.navbar.is-warning .navbar-start .navbar-link.is-active,.navbar.is-warning .navbar-end>a.navbar-item:focus,.navbar.is-warning .navbar-end>a.navbar-item:hover,.navbar.is-warning .navbar-end>a.navbar-item.is-active,.navbar.is-warning .navbar-end .navbar-link:focus,.navbar.is-warning .navbar-end .navbar-link:hover,.navbar.is-warning .navbar-end .navbar-link.is-active{background-color:#ffd83e;color:rgba(0,0,0,0.7)}.navbar.is-warning .navbar-start .navbar-link::after,.navbar.is-warning .navbar-end .navbar-link::after{border-color:rgba(0,0,0,0.7)}.navbar.is-warning .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-warning .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-warning .navbar-item.has-dropdown.is-active .navbar-link{background-color:#ffd83e;color:rgba(0,0,0,0.7)}.navbar.is-warning .navbar-dropdown a.navbar-item.is-active{background-color:#ffdd57;color:rgba(0,0,0,0.7)}}.navbar.is-danger{background-color:#da0b00;color:#fff}.navbar.is-danger .navbar-brand>.navbar-item,.navbar.is-danger .navbar-brand .navbar-link{color:#fff}.navbar.is-danger .navbar-brand>a.navbar-item:focus,.navbar.is-danger .navbar-brand>a.navbar-item:hover,.navbar.is-danger .navbar-brand>a.navbar-item.is-active,.navbar.is-danger .navbar-brand .navbar-link:focus,.navbar.is-danger .navbar-brand .navbar-link:hover,.navbar.is-danger .navbar-brand .navbar-link.is-active{background-color:#c10a00;color:#fff}.navbar.is-danger .navbar-brand .navbar-link::after{border-color:#fff}.navbar.is-danger .navbar-burger{color:#fff}@media screen and (min-width: 1056px){.navbar.is-danger .navbar-start>.navbar-item,.navbar.is-danger 
.navbar-start .navbar-link,.navbar.is-danger .navbar-end>.navbar-item,.navbar.is-danger .navbar-end .navbar-link{color:#fff}.navbar.is-danger .navbar-start>a.navbar-item:focus,.navbar.is-danger .navbar-start>a.navbar-item:hover,.navbar.is-danger .navbar-start>a.navbar-item.is-active,.navbar.is-danger .navbar-start .navbar-link:focus,.navbar.is-danger .navbar-start .navbar-link:hover,.navbar.is-danger .navbar-start .navbar-link.is-active,.navbar.is-danger .navbar-end>a.navbar-item:focus,.navbar.is-danger .navbar-end>a.navbar-item:hover,.navbar.is-danger .navbar-end>a.navbar-item.is-active,.navbar.is-danger .navbar-end .navbar-link:focus,.navbar.is-danger .navbar-end .navbar-link:hover,.navbar.is-danger .navbar-end .navbar-link.is-active{background-color:#c10a00;color:#fff}.navbar.is-danger .navbar-start .navbar-link::after,.navbar.is-danger .navbar-end .navbar-link::after{border-color:#fff}.navbar.is-danger .navbar-item.has-dropdown:focus .navbar-link,.navbar.is-danger .navbar-item.has-dropdown:hover .navbar-link,.navbar.is-danger .navbar-item.has-dropdown.is-active .navbar-link{background-color:#c10a00;color:#fff}.navbar.is-danger .navbar-dropdown a.navbar-item.is-active{background-color:#da0b00;color:#fff}}.navbar>.container{align-items:stretch;display:flex;min-height:3.25rem;width:100%}.navbar.has-shadow{box-shadow:0 2px 0 0 #f5f5f5}.navbar.is-fixed-bottom,.navbar.is-fixed-top{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom{bottom:0}.navbar.is-fixed-bottom.has-shadow{box-shadow:0 -2px 0 0 #f5f5f5}.navbar.is-fixed-top{top:0}html.has-navbar-fixed-top,body.has-navbar-fixed-top{padding-top:3.25rem}html.has-navbar-fixed-bottom,body.has-navbar-fixed-bottom{padding-bottom:3.25rem}.navbar-brand,.navbar-tabs{align-items:stretch;display:flex;flex-shrink:0;min-height:3.25rem}.navbar-brand a.navbar-item:focus,.navbar-brand a.navbar-item:hover{background-color:transparent}.navbar-tabs{-webkit-overflow-scrolling:touch;max-width:100vw;overflow-x:auto;overflow-y:hidden}.navbar-burger{color:#222;-moz-appearance:none;-webkit-appearance:none;appearance:none;background:none;border:none;cursor:pointer;display:block;height:3.25rem;position:relative;width:3.25rem;margin-left:auto}.navbar-burger span{background-color:currentColor;display:block;height:1px;left:calc(50% - 8px);position:absolute;transform-origin:center;transition-duration:86ms;transition-property:background-color, opacity, transform;transition-timing-function:ease-out;width:16px}.navbar-burger span:nth-child(1){top:calc(50% - 6px)}.navbar-burger span:nth-child(2){top:calc(50% - 1px)}.navbar-burger span:nth-child(3){top:calc(50% + 4px)}.navbar-burger:hover{background-color:rgba(0,0,0,0.05)}.navbar-burger.is-active span:nth-child(1){transform:translateY(5px) rotate(45deg)}.navbar-burger.is-active span:nth-child(2){opacity:0}.navbar-burger.is-active span:nth-child(3){transform:translateY(-5px) rotate(-45deg)}.navbar-menu{display:none}.navbar-item,.navbar-link{color:#222;display:block;line-height:1.5;padding:0.5rem 0.75rem;position:relative}.navbar-item .icon:only-child,.navbar-link .icon:only-child{margin-left:-0.25rem;margin-right:-0.25rem}a.navbar-item,.navbar-link{cursor:pointer}a.navbar-item:focus,a.navbar-item:focus-within,a.navbar-item:hover,a.navbar-item.is-active,.navbar-link:focus,.navbar-link:focus-within,.navbar-link:hover,.navbar-link.is-active{background-color:#fafafa;color:#2e63b8}.navbar-item{flex-grow:0;flex-shrink:0}.navbar-item 
img{max-height:1.75rem}.navbar-item.has-dropdown{padding:0}.navbar-item.is-expanded{flex-grow:1;flex-shrink:1}.navbar-item.is-tab{border-bottom:1px solid transparent;min-height:3.25rem;padding-bottom:calc(0.5rem - 1px)}.navbar-item.is-tab:focus,.navbar-item.is-tab:hover{background-color:rgba(0,0,0,0);border-bottom-color:#2e63b8}.navbar-item.is-tab.is-active{background-color:rgba(0,0,0,0);border-bottom-color:#2e63b8;border-bottom-style:solid;border-bottom-width:3px;color:#2e63b8;padding-bottom:calc(0.5rem - 3px)}.navbar-content{flex-grow:1;flex-shrink:1}.navbar-link:not(.is-arrowless){padding-right:2.5em}.navbar-link:not(.is-arrowless)::after{border-color:#2e63b8;margin-top:-0.375em;right:1.125em}.navbar-dropdown{font-size:0.875rem;padding-bottom:0.5rem;padding-top:0.5rem}.navbar-dropdown .navbar-item{padding-left:1.5rem;padding-right:1.5rem}.navbar-divider{background-color:#f5f5f5;border:none;display:none;height:2px;margin:0.5rem 0}@media screen and (max-width: 1055px){.navbar>.container{display:block}.navbar-brand .navbar-item,.navbar-tabs .navbar-item{align-items:center;display:flex}.navbar-link::after{display:none}.navbar-menu{background-color:#fff;box-shadow:0 8px 16px rgba(10,10,10,0.1);padding:0.5rem 0}.navbar-menu.is-active{display:block}.navbar.is-fixed-bottom-touch,.navbar.is-fixed-top-touch{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom-touch{bottom:0}.navbar.is-fixed-bottom-touch.has-shadow{box-shadow:0 -2px 3px rgba(10,10,10,0.1)}.navbar.is-fixed-top-touch{top:0}.navbar.is-fixed-top .navbar-menu,.navbar.is-fixed-top-touch .navbar-menu{-webkit-overflow-scrolling:touch;max-height:calc(100vh - 3.25rem);overflow:auto}html.has-navbar-fixed-top-touch,body.has-navbar-fixed-top-touch{padding-top:3.25rem}html.has-navbar-fixed-bottom-touch,body.has-navbar-fixed-bottom-touch{padding-bottom:3.25rem}}@media screen and (min-width: 1056px){.navbar,.navbar-menu,.navbar-start,.navbar-end{align-items:stretch;display:flex}.navbar{min-height:3.25rem}.navbar.is-spaced{padding:1rem 2rem}.navbar.is-spaced .navbar-start,.navbar.is-spaced .navbar-end{align-items:center}.navbar.is-spaced a.navbar-item,.navbar.is-spaced .navbar-link{border-radius:4px}.navbar.is-transparent a.navbar-item:focus,.navbar.is-transparent a.navbar-item:hover,.navbar.is-transparent a.navbar-item.is-active,.navbar.is-transparent .navbar-link:focus,.navbar.is-transparent .navbar-link:hover,.navbar.is-transparent .navbar-link.is-active{background-color:transparent !important}.navbar.is-transparent .navbar-item.has-dropdown.is-active .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:focus-within .navbar-link,.navbar.is-transparent .navbar-item.has-dropdown.is-hoverable:hover .navbar-link{background-color:transparent !important}.navbar.is-transparent .navbar-dropdown a.navbar-item:focus,.navbar.is-transparent .navbar-dropdown a.navbar-item:hover{background-color:#f5f5f5;color:#0a0a0a}.navbar.is-transparent .navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:#2e63b8}.navbar-burger{display:none}.navbar-item,.navbar-link{align-items:center;display:flex}.navbar-item.has-dropdown{align-items:stretch}.navbar-item.has-dropdown-up .navbar-link::after{transform:rotate(135deg) translate(0.25em, -0.25em)}.navbar-item.has-dropdown-up .navbar-dropdown{border-bottom:2px solid #dbdbdb;border-radius:6px 6px 0 0;border-top:none;bottom:100%;box-shadow:0 -8px 8px 
rgba(10,10,10,0.1);top:auto}.navbar-item.is-active .navbar-dropdown,.navbar-item.is-hoverable:focus .navbar-dropdown,.navbar-item.is-hoverable:focus-within .navbar-dropdown,.navbar-item.is-hoverable:hover .navbar-dropdown{display:block}.navbar.is-spaced .navbar-item.is-active .navbar-dropdown,.navbar-item.is-active .navbar-dropdown.is-boxed,.navbar.is-spaced .navbar-item.is-hoverable:focus .navbar-dropdown,.navbar-item.is-hoverable:focus .navbar-dropdown.is-boxed,.navbar.is-spaced .navbar-item.is-hoverable:focus-within .navbar-dropdown,.navbar-item.is-hoverable:focus-within .navbar-dropdown.is-boxed,.navbar.is-spaced .navbar-item.is-hoverable:hover .navbar-dropdown,.navbar-item.is-hoverable:hover .navbar-dropdown.is-boxed{opacity:1;pointer-events:auto;transform:translateY(0)}.navbar-menu{flex-grow:1;flex-shrink:0}.navbar-start{justify-content:flex-start;margin-right:auto}.navbar-end{justify-content:flex-end;margin-left:auto}.navbar-dropdown{background-color:#fff;border-bottom-left-radius:6px;border-bottom-right-radius:6px;border-top:2px solid #dbdbdb;box-shadow:0 8px 8px rgba(10,10,10,0.1);display:none;font-size:0.875rem;left:0;min-width:100%;position:absolute;top:100%;z-index:20}.navbar-dropdown .navbar-item{padding:0.375rem 1rem;white-space:nowrap}.navbar-dropdown a.navbar-item{padding-right:3rem}.navbar-dropdown a.navbar-item:focus,.navbar-dropdown a.navbar-item:hover{background-color:#f5f5f5;color:#0a0a0a}.navbar-dropdown a.navbar-item.is-active{background-color:#f5f5f5;color:#2e63b8}.navbar.is-spaced .navbar-dropdown,.navbar-dropdown.is-boxed{border-radius:6px;border-top:none;box-shadow:0 8px 8px rgba(10,10,10,0.1), 0 0 0 1px rgba(10,10,10,0.1);display:block;opacity:0;pointer-events:none;top:calc(100% + (-4px));transform:translateY(-5px);transition-duration:86ms;transition-property:opacity, transform}.navbar-dropdown.is-right{left:auto;right:0}.navbar-divider{display:block}.navbar>.container .navbar-brand,.container>.navbar .navbar-brand{margin-left:-.75rem}.navbar>.container .navbar-menu,.container>.navbar .navbar-menu{margin-right:-.75rem}.navbar.is-fixed-bottom-desktop,.navbar.is-fixed-top-desktop{left:0;position:fixed;right:0;z-index:30}.navbar.is-fixed-bottom-desktop{bottom:0}.navbar.is-fixed-bottom-desktop.has-shadow{box-shadow:0 -2px 3px rgba(10,10,10,0.1)}.navbar.is-fixed-top-desktop{top:0}html.has-navbar-fixed-top-desktop,body.has-navbar-fixed-top-desktop{padding-top:3.25rem}html.has-navbar-fixed-bottom-desktop,body.has-navbar-fixed-bottom-desktop{padding-bottom:3.25rem}html.has-spaced-navbar-fixed-top,body.has-spaced-navbar-fixed-top{padding-top:5.25rem}html.has-spaced-navbar-fixed-bottom,body.has-spaced-navbar-fixed-bottom{padding-bottom:5.25rem}a.navbar-item.is-active,.navbar-link.is-active{color:#0a0a0a}a.navbar-item.is-active:not(:focus):not(:hover),.navbar-link.is-active:not(:focus):not(:hover){background-color:rgba(0,0,0,0)}.navbar-item.has-dropdown:focus .navbar-link,.navbar-item.has-dropdown:hover .navbar-link,.navbar-item.has-dropdown.is-active .navbar-link{background-color:#fafafa}}.hero.is-fullheight-with-navbar{min-height:calc(100vh - 3.25rem)}.pagination{font-size:1rem;margin:-.25rem}.pagination.is-small,#documenter .docs-sidebar form.docs-search>input.pagination{font-size:.75rem}.pagination.is-medium{font-size:1.25rem}.pagination.is-large{font-size:1.5rem}.pagination.is-rounded .pagination-previous,#documenter .docs-sidebar form.docs-search>input.pagination .pagination-previous,.pagination.is-rounded .pagination-next,#documenter .docs-sidebar 
form.docs-search>input.pagination .pagination-next{padding-left:1em;padding-right:1em;border-radius:9999px}.pagination.is-rounded .pagination-link,#documenter .docs-sidebar form.docs-search>input.pagination .pagination-link{border-radius:9999px}.pagination,.pagination-list{align-items:center;display:flex;justify-content:center;text-align:center}.pagination-previous,.pagination-next,.pagination-link,.pagination-ellipsis{font-size:1em;justify-content:center;margin:.25rem;padding-left:.5em;padding-right:.5em;text-align:center}.pagination-previous,.pagination-next,.pagination-link{border-color:#dbdbdb;color:#222;min-width:2.5em}.pagination-previous:hover,.pagination-next:hover,.pagination-link:hover{border-color:#b5b5b5;color:#363636}.pagination-previous:focus,.pagination-next:focus,.pagination-link:focus{border-color:#3c5dcd}.pagination-previous:active,.pagination-next:active,.pagination-link:active{box-shadow:inset 0 1px 2px rgba(10,10,10,0.2)}.pagination-previous[disabled],.pagination-previous.is-disabled,.pagination-next[disabled],.pagination-next.is-disabled,.pagination-link[disabled],.pagination-link.is-disabled{background-color:#dbdbdb;border-color:#dbdbdb;box-shadow:none;color:#6b6b6b;opacity:0.5}.pagination-previous,.pagination-next{padding-left:.75em;padding-right:.75em;white-space:nowrap}.pagination-link.is-current{background-color:#2e63b8;border-color:#2e63b8;color:#fff}.pagination-ellipsis{color:#b5b5b5;pointer-events:none}.pagination-list{flex-wrap:wrap}.pagination-list li{list-style:none}@media screen and (max-width: 768px){.pagination{flex-wrap:wrap}.pagination-previous,.pagination-next{flex-grow:1;flex-shrink:1}.pagination-list li{flex-grow:1;flex-shrink:1}}@media screen and (min-width: 769px),print{.pagination-list{flex-grow:1;flex-shrink:1;justify-content:flex-start;order:1}.pagination-previous,.pagination-next,.pagination-link,.pagination-ellipsis{margin-bottom:0;margin-top:0}.pagination-previous{order:2}.pagination-next{order:3}.pagination{justify-content:space-between;margin-bottom:0;margin-top:0}.pagination.is-centered .pagination-previous{order:1}.pagination.is-centered .pagination-list{justify-content:center;order:2}.pagination.is-centered .pagination-next{order:3}.pagination.is-right .pagination-previous{order:1}.pagination.is-right .pagination-next{order:2}.pagination.is-right .pagination-list{justify-content:flex-end;order:3}}.panel{border-radius:6px;box-shadow:#bbb;font-size:1rem}.panel:not(:last-child){margin-bottom:1.5rem}.panel.is-white .panel-heading{background-color:#fff;color:#0a0a0a}.panel.is-white .panel-tabs a.is-active{border-bottom-color:#fff}.panel.is-white .panel-block.is-active .panel-icon{color:#fff}.panel.is-black .panel-heading{background-color:#0a0a0a;color:#fff}.panel.is-black .panel-tabs a.is-active{border-bottom-color:#0a0a0a}.panel.is-black .panel-block.is-active .panel-icon{color:#0a0a0a}.panel.is-light .panel-heading{background-color:#f5f5f5;color:rgba(0,0,0,0.7)}.panel.is-light .panel-tabs a.is-active{border-bottom-color:#f5f5f5}.panel.is-light .panel-block.is-active .panel-icon{color:#f5f5f5}.panel.is-dark .panel-heading,.content kbd.panel .panel-heading{background-color:#363636;color:#fff}.panel.is-dark .panel-tabs a.is-active,.content kbd.panel .panel-tabs a.is-active{border-bottom-color:#363636}.panel.is-dark .panel-block.is-active .panel-icon,.content kbd.panel .panel-block.is-active .panel-icon{color:#363636}.panel.is-primary .panel-heading,.docstring>section>a.panel.docs-sourcelink 
.panel-heading{background-color:#4eb5de;color:#fff}.panel.is-primary .panel-tabs a.is-active,.docstring>section>a.panel.docs-sourcelink .panel-tabs a.is-active{border-bottom-color:#4eb5de}.panel.is-primary .panel-block.is-active .panel-icon,.docstring>section>a.panel.docs-sourcelink .panel-block.is-active .panel-icon{color:#4eb5de}.panel.is-link .panel-heading{background-color:#2e63b8;color:#fff}.panel.is-link .panel-tabs a.is-active{border-bottom-color:#2e63b8}.panel.is-link .panel-block.is-active .panel-icon{color:#2e63b8}.panel.is-info .panel-heading{background-color:#209cee;color:#fff}.panel.is-info .panel-tabs a.is-active{border-bottom-color:#209cee}.panel.is-info .panel-block.is-active .panel-icon{color:#209cee}.panel.is-success .panel-heading{background-color:#22c35b;color:#fff}.panel.is-success .panel-tabs a.is-active{border-bottom-color:#22c35b}.panel.is-success .panel-block.is-active .panel-icon{color:#22c35b}.panel.is-warning .panel-heading{background-color:#ffdd57;color:rgba(0,0,0,0.7)}.panel.is-warning .panel-tabs a.is-active{border-bottom-color:#ffdd57}.panel.is-warning .panel-block.is-active .panel-icon{color:#ffdd57}.panel.is-danger .panel-heading{background-color:#da0b00;color:#fff}.panel.is-danger .panel-tabs a.is-active{border-bottom-color:#da0b00}.panel.is-danger .panel-block.is-active .panel-icon{color:#da0b00}.panel-tabs:not(:last-child),.panel-block:not(:last-child){border-bottom:1px solid #ededed}.panel-heading{background-color:#ededed;border-radius:6px 6px 0 0;color:#222;font-size:1.25em;font-weight:700;line-height:1.25;padding:0.75em 1em}.panel-tabs{align-items:flex-end;display:flex;font-size:.875em;justify-content:center}.panel-tabs a{border-bottom:1px solid #dbdbdb;margin-bottom:-1px;padding:0.5em}.panel-tabs a.is-active{border-bottom-color:#4a4a4a;color:#363636}.panel-list a{color:#222}.panel-list a:hover{color:#2e63b8}.panel-block{align-items:center;color:#222;display:flex;justify-content:flex-start;padding:0.5em 0.75em}.panel-block input[type="checkbox"]{margin-right:.75em}.panel-block>.control{flex-grow:1;flex-shrink:1;width:100%}.panel-block.is-wrapped{flex-wrap:wrap}.panel-block.is-active{border-left-color:#2e63b8;color:#363636}.panel-block.is-active .panel-icon{color:#2e63b8}.panel-block:last-child{border-bottom-left-radius:6px;border-bottom-right-radius:6px}a.panel-block,label.panel-block{cursor:pointer}a.panel-block:hover,label.panel-block:hover{background-color:#f5f5f5}.panel-icon{display:inline-block;font-size:14px;height:1em;line-height:1em;text-align:center;vertical-align:top;width:1em;color:#6b6b6b;margin-right:.75em}.panel-icon .fa{font-size:inherit;line-height:inherit}.tabs{-webkit-overflow-scrolling:touch;align-items:stretch;display:flex;font-size:1rem;justify-content:space-between;overflow:hidden;overflow-x:auto;white-space:nowrap}.tabs a{align-items:center;border-bottom-color:#dbdbdb;border-bottom-style:solid;border-bottom-width:1px;color:#222;display:flex;justify-content:center;margin-bottom:-1px;padding:0.5em 1em;vertical-align:top}.tabs a:hover{border-bottom-color:#222;color:#222}.tabs li{display:block}.tabs li.is-active a{border-bottom-color:#2e63b8;color:#2e63b8}.tabs ul{align-items:center;border-bottom-color:#dbdbdb;border-bottom-style:solid;border-bottom-width:1px;display:flex;flex-grow:1;flex-shrink:0;justify-content:flex-start}.tabs ul.is-left{padding-right:0.75em}.tabs ul.is-center{flex:none;justify-content:center;padding-left:0.75em;padding-right:0.75em}.tabs ul.is-right{justify-content:flex-end;padding-left:0.75em}.tabs 
.icon:first-child{margin-right:.5em}.tabs .icon:last-child{margin-left:.5em}.tabs.is-centered ul{justify-content:center}.tabs.is-right ul{justify-content:flex-end}.tabs.is-boxed a{border:1px solid transparent;border-radius:4px 4px 0 0}.tabs.is-boxed a:hover{background-color:#f5f5f5;border-bottom-color:#dbdbdb}.tabs.is-boxed li.is-active a{background-color:#fff;border-color:#dbdbdb;border-bottom-color:rgba(0,0,0,0) !important}.tabs.is-fullwidth li{flex-grow:1;flex-shrink:0}.tabs.is-toggle a{border-color:#dbdbdb;border-style:solid;border-width:1px;margin-bottom:0;position:relative}.tabs.is-toggle a:hover{background-color:#f5f5f5;border-color:#b5b5b5;z-index:2}.tabs.is-toggle li+li{margin-left:-1px}.tabs.is-toggle li:first-child a{border-top-left-radius:4px;border-bottom-left-radius:4px}.tabs.is-toggle li:last-child a{border-top-right-radius:4px;border-bottom-right-radius:4px}.tabs.is-toggle li.is-active a{background-color:#2e63b8;border-color:#2e63b8;color:#fff;z-index:1}.tabs.is-toggle ul{border-bottom:none}.tabs.is-toggle.is-toggle-rounded li:first-child a{border-bottom-left-radius:9999px;border-top-left-radius:9999px;padding-left:1.25em}.tabs.is-toggle.is-toggle-rounded li:last-child a{border-bottom-right-radius:9999px;border-top-right-radius:9999px;padding-right:1.25em}.tabs.is-small,#documenter .docs-sidebar form.docs-search>input.tabs{font-size:.75rem}.tabs.is-medium{font-size:1.25rem}.tabs.is-large{font-size:1.5rem}.column{display:block;flex-basis:0;flex-grow:1;flex-shrink:1;padding:.75rem}.columns.is-mobile>.column.is-narrow{flex:none;width:unset}.columns.is-mobile>.column.is-full{flex:none;width:100%}.columns.is-mobile>.column.is-three-quarters{flex:none;width:75%}.columns.is-mobile>.column.is-two-thirds{flex:none;width:66.6666%}.columns.is-mobile>.column.is-half{flex:none;width:50%}.columns.is-mobile>.column.is-one-third{flex:none;width:33.3333%}.columns.is-mobile>.column.is-one-quarter{flex:none;width:25%}.columns.is-mobile>.column.is-one-fifth{flex:none;width:20%}.columns.is-mobile>.column.is-two-fifths{flex:none;width:40%}.columns.is-mobile>.column.is-three-fifths{flex:none;width:60%}.columns.is-mobile>.column.is-four-fifths{flex:none;width:80%}.columns.is-mobile>.column.is-offset-three-quarters{margin-left:75%}.columns.is-mobile>.column.is-offset-two-thirds{margin-left:66.6666%}.columns.is-mobile>.column.is-offset-half{margin-left:50%}.columns.is-mobile>.column.is-offset-one-third{margin-left:33.3333%}.columns.is-mobile>.column.is-offset-one-quarter{margin-left:25%}.columns.is-mobile>.column.is-offset-one-fifth{margin-left:20%}.columns.is-mobile>.column.is-offset-two-fifths{margin-left:40%}.columns.is-mobile>.column.is-offset-three-fifths{margin-left:60%}.columns.is-mobile>.column.is-offset-four-fifths{margin-left:80%}.columns.is-mobile>.column.is-0{flex:none;width:0%}.columns.is-mobile>.column.is-offset-0{margin-left:0%}.columns.is-mobile>.column.is-1{flex:none;width:8.33333337%}.columns.is-mobile>.column.is-offset-1{margin-left:8.33333337%}.columns.is-mobile>.column.is-2{flex:none;width:16.66666674%}.columns.is-mobile>.column.is-offset-2{margin-left:16.66666674%}.columns.is-mobile>.column.is-3{flex:none;width:25%}.columns.is-mobile>.column.is-offset-3{margin-left:25%}.columns.is-mobile>.column.is-4{flex:none;width:33.33333337%}.columns.is-mobile>.column.is-offset-4{margin-left:33.33333337%}.columns.is-mobile>.column.is-5{flex:none;width:41.66666674%}.columns.is-mobile>.column.is-offset-5{margin-left:41.66666674%}.columns.is-mobile>.column.is-6{flex:none;width:50%}.columns.is-m
obile>.column.is-offset-6{margin-left:50%}.columns.is-mobile>.column.is-7{flex:none;width:58.33333337%}.columns.is-mobile>.column.is-offset-7{margin-left:58.33333337%}.columns.is-mobile>.column.is-8{flex:none;width:66.66666674%}.columns.is-mobile>.column.is-offset-8{margin-left:66.66666674%}.columns.is-mobile>.column.is-9{flex:none;width:75%}.columns.is-mobile>.column.is-offset-9{margin-left:75%}.columns.is-mobile>.column.is-10{flex:none;width:83.33333337%}.columns.is-mobile>.column.is-offset-10{margin-left:83.33333337%}.columns.is-mobile>.column.is-11{flex:none;width:91.66666674%}.columns.is-mobile>.column.is-offset-11{margin-left:91.66666674%}.columns.is-mobile>.column.is-12{flex:none;width:100%}.columns.is-mobile>.column.is-offset-12{margin-left:100%}@media screen and (max-width: 768px){.column.is-narrow-mobile{flex:none;width:unset}.column.is-full-mobile{flex:none;width:100%}.column.is-three-quarters-mobile{flex:none;width:75%}.column.is-two-thirds-mobile{flex:none;width:66.6666%}.column.is-half-mobile{flex:none;width:50%}.column.is-one-third-mobile{flex:none;width:33.3333%}.column.is-one-quarter-mobile{flex:none;width:25%}.column.is-one-fifth-mobile{flex:none;width:20%}.column.is-two-fifths-mobile{flex:none;width:40%}.column.is-three-fifths-mobile{flex:none;width:60%}.column.is-four-fifths-mobile{flex:none;width:80%}.column.is-offset-three-quarters-mobile{margin-left:75%}.column.is-offset-two-thirds-mobile{margin-left:66.6666%}.column.is-offset-half-mobile{margin-left:50%}.column.is-offset-one-third-mobile{margin-left:33.3333%}.column.is-offset-one-quarter-mobile{margin-left:25%}.column.is-offset-one-fifth-mobile{margin-left:20%}.column.is-offset-two-fifths-mobile{margin-left:40%}.column.is-offset-three-fifths-mobile{margin-left:60%}.column.is-offset-four-fifths-mobile{margin-left:80%}.column.is-0-mobile{flex:none;width:0%}.column.is-offset-0-mobile{margin-left:0%}.column.is-1-mobile{flex:none;width:8.33333337%}.column.is-offset-1-mobile{margin-left:8.33333337%}.column.is-2-mobile{flex:none;width:16.66666674%}.column.is-offset-2-mobile{margin-left:16.66666674%}.column.is-3-mobile{flex:none;width:25%}.column.is-offset-3-mobile{margin-left:25%}.column.is-4-mobile{flex:none;width:33.33333337%}.column.is-offset-4-mobile{margin-left:33.33333337%}.column.is-5-mobile{flex:none;width:41.66666674%}.column.is-offset-5-mobile{margin-left:41.66666674%}.column.is-6-mobile{flex:none;width:50%}.column.is-offset-6-mobile{margin-left:50%}.column.is-7-mobile{flex:none;width:58.33333337%}.column.is-offset-7-mobile{margin-left:58.33333337%}.column.is-8-mobile{flex:none;width:66.66666674%}.column.is-offset-8-mobile{margin-left:66.66666674%}.column.is-9-mobile{flex:none;width:75%}.column.is-offset-9-mobile{margin-left:75%}.column.is-10-mobile{flex:none;width:83.33333337%}.column.is-offset-10-mobile{margin-left:83.33333337%}.column.is-11-mobile{flex:none;width:91.66666674%}.column.is-offset-11-mobile{margin-left:91.66666674%}.column.is-12-mobile{flex:none;width:100%}.column.is-offset-12-mobile{margin-left:100%}}@media screen and (min-width: 
769px),print{.column.is-narrow,.column.is-narrow-tablet{flex:none;width:unset}.column.is-full,.column.is-full-tablet{flex:none;width:100%}.column.is-three-quarters,.column.is-three-quarters-tablet{flex:none;width:75%}.column.is-two-thirds,.column.is-two-thirds-tablet{flex:none;width:66.6666%}.column.is-half,.column.is-half-tablet{flex:none;width:50%}.column.is-one-third,.column.is-one-third-tablet{flex:none;width:33.3333%}.column.is-one-quarter,.column.is-one-quarter-tablet{flex:none;width:25%}.column.is-one-fifth,.column.is-one-fifth-tablet{flex:none;width:20%}.column.is-two-fifths,.column.is-two-fifths-tablet{flex:none;width:40%}.column.is-three-fifths,.column.is-three-fifths-tablet{flex:none;width:60%}.column.is-four-fifths,.column.is-four-fifths-tablet{flex:none;width:80%}.column.is-offset-three-quarters,.column.is-offset-three-quarters-tablet{margin-left:75%}.column.is-offset-two-thirds,.column.is-offset-two-thirds-tablet{margin-left:66.6666%}.column.is-offset-half,.column.is-offset-half-tablet{margin-left:50%}.column.is-offset-one-third,.column.is-offset-one-third-tablet{margin-left:33.3333%}.column.is-offset-one-quarter,.column.is-offset-one-quarter-tablet{margin-left:25%}.column.is-offset-one-fifth,.column.is-offset-one-fifth-tablet{margin-left:20%}.column.is-offset-two-fifths,.column.is-offset-two-fifths-tablet{margin-left:40%}.column.is-offset-three-fifths,.column.is-offset-three-fifths-tablet{margin-left:60%}.column.is-offset-four-fifths,.column.is-offset-four-fifths-tablet{margin-left:80%}.column.is-0,.column.is-0-tablet{flex:none;width:0%}.column.is-offset-0,.column.is-offset-0-tablet{margin-left:0%}.column.is-1,.column.is-1-tablet{flex:none;width:8.33333337%}.column.is-offset-1,.column.is-offset-1-tablet{margin-left:8.33333337%}.column.is-2,.column.is-2-tablet{flex:none;width:16.66666674%}.column.is-offset-2,.column.is-offset-2-tablet{margin-left:16.66666674%}.column.is-3,.column.is-3-tablet{flex:none;width:25%}.column.is-offset-3,.column.is-offset-3-tablet{margin-left:25%}.column.is-4,.column.is-4-tablet{flex:none;width:33.33333337%}.column.is-offset-4,.column.is-offset-4-tablet{margin-left:33.33333337%}.column.is-5,.column.is-5-tablet{flex:none;width:41.66666674%}.column.is-offset-5,.column.is-offset-5-tablet{margin-left:41.66666674%}.column.is-6,.column.is-6-tablet{flex:none;width:50%}.column.is-offset-6,.column.is-offset-6-tablet{margin-left:50%}.column.is-7,.column.is-7-tablet{flex:none;width:58.33333337%}.column.is-offset-7,.column.is-offset-7-tablet{margin-left:58.33333337%}.column.is-8,.column.is-8-tablet{flex:none;width:66.66666674%}.column.is-offset-8,.column.is-offset-8-tablet{margin-left:66.66666674%}.column.is-9,.column.is-9-tablet{flex:none;width:75%}.column.is-offset-9,.column.is-offset-9-tablet{margin-left:75%}.column.is-10,.column.is-10-tablet{flex:none;width:83.33333337%}.column.is-offset-10,.column.is-offset-10-tablet{margin-left:83.33333337%}.column.is-11,.column.is-11-tablet{flex:none;width:91.66666674%}.column.is-offset-11,.column.is-offset-11-tablet{margin-left:91.66666674%}.column.is-12,.column.is-12-tablet{flex:none;width:100%}.column.is-offset-12,.column.is-offset-12-tablet{margin-left:100%}}@media screen and (max-width: 
1055px){.column.is-narrow-touch{flex:none;width:unset}.column.is-full-touch{flex:none;width:100%}.column.is-three-quarters-touch{flex:none;width:75%}.column.is-two-thirds-touch{flex:none;width:66.6666%}.column.is-half-touch{flex:none;width:50%}.column.is-one-third-touch{flex:none;width:33.3333%}.column.is-one-quarter-touch{flex:none;width:25%}.column.is-one-fifth-touch{flex:none;width:20%}.column.is-two-fifths-touch{flex:none;width:40%}.column.is-three-fifths-touch{flex:none;width:60%}.column.is-four-fifths-touch{flex:none;width:80%}.column.is-offset-three-quarters-touch{margin-left:75%}.column.is-offset-two-thirds-touch{margin-left:66.6666%}.column.is-offset-half-touch{margin-left:50%}.column.is-offset-one-third-touch{margin-left:33.3333%}.column.is-offset-one-quarter-touch{margin-left:25%}.column.is-offset-one-fifth-touch{margin-left:20%}.column.is-offset-two-fifths-touch{margin-left:40%}.column.is-offset-three-fifths-touch{margin-left:60%}.column.is-offset-four-fifths-touch{margin-left:80%}.column.is-0-touch{flex:none;width:0%}.column.is-offset-0-touch{margin-left:0%}.column.is-1-touch{flex:none;width:8.33333337%}.column.is-offset-1-touch{margin-left:8.33333337%}.column.is-2-touch{flex:none;width:16.66666674%}.column.is-offset-2-touch{margin-left:16.66666674%}.column.is-3-touch{flex:none;width:25%}.column.is-offset-3-touch{margin-left:25%}.column.is-4-touch{flex:none;width:33.33333337%}.column.is-offset-4-touch{margin-left:33.33333337%}.column.is-5-touch{flex:none;width:41.66666674%}.column.is-offset-5-touch{margin-left:41.66666674%}.column.is-6-touch{flex:none;width:50%}.column.is-offset-6-touch{margin-left:50%}.column.is-7-touch{flex:none;width:58.33333337%}.column.is-offset-7-touch{margin-left:58.33333337%}.column.is-8-touch{flex:none;width:66.66666674%}.column.is-offset-8-touch{margin-left:66.66666674%}.column.is-9-touch{flex:none;width:75%}.column.is-offset-9-touch{margin-left:75%}.column.is-10-touch{flex:none;width:83.33333337%}.column.is-offset-10-touch{margin-left:83.33333337%}.column.is-11-touch{flex:none;width:91.66666674%}.column.is-offset-11-touch{margin-left:91.66666674%}.column.is-12-touch{flex:none;width:100%}.column.is-offset-12-touch{margin-left:100%}}@media screen and (min-width: 
1056px){.column.is-narrow-desktop{flex:none;width:unset}.column.is-full-desktop{flex:none;width:100%}.column.is-three-quarters-desktop{flex:none;width:75%}.column.is-two-thirds-desktop{flex:none;width:66.6666%}.column.is-half-desktop{flex:none;width:50%}.column.is-one-third-desktop{flex:none;width:33.3333%}.column.is-one-quarter-desktop{flex:none;width:25%}.column.is-one-fifth-desktop{flex:none;width:20%}.column.is-two-fifths-desktop{flex:none;width:40%}.column.is-three-fifths-desktop{flex:none;width:60%}.column.is-four-fifths-desktop{flex:none;width:80%}.column.is-offset-three-quarters-desktop{margin-left:75%}.column.is-offset-two-thirds-desktop{margin-left:66.6666%}.column.is-offset-half-desktop{margin-left:50%}.column.is-offset-one-third-desktop{margin-left:33.3333%}.column.is-offset-one-quarter-desktop{margin-left:25%}.column.is-offset-one-fifth-desktop{margin-left:20%}.column.is-offset-two-fifths-desktop{margin-left:40%}.column.is-offset-three-fifths-desktop{margin-left:60%}.column.is-offset-four-fifths-desktop{margin-left:80%}.column.is-0-desktop{flex:none;width:0%}.column.is-offset-0-desktop{margin-left:0%}.column.is-1-desktop{flex:none;width:8.33333337%}.column.is-offset-1-desktop{margin-left:8.33333337%}.column.is-2-desktop{flex:none;width:16.66666674%}.column.is-offset-2-desktop{margin-left:16.66666674%}.column.is-3-desktop{flex:none;width:25%}.column.is-offset-3-desktop{margin-left:25%}.column.is-4-desktop{flex:none;width:33.33333337%}.column.is-offset-4-desktop{margin-left:33.33333337%}.column.is-5-desktop{flex:none;width:41.66666674%}.column.is-offset-5-desktop{margin-left:41.66666674%}.column.is-6-desktop{flex:none;width:50%}.column.is-offset-6-desktop{margin-left:50%}.column.is-7-desktop{flex:none;width:58.33333337%}.column.is-offset-7-desktop{margin-left:58.33333337%}.column.is-8-desktop{flex:none;width:66.66666674%}.column.is-offset-8-desktop{margin-left:66.66666674%}.column.is-9-desktop{flex:none;width:75%}.column.is-offset-9-desktop{margin-left:75%}.column.is-10-desktop{flex:none;width:83.33333337%}.column.is-offset-10-desktop{margin-left:83.33333337%}.column.is-11-desktop{flex:none;width:91.66666674%}.column.is-offset-11-desktop{margin-left:91.66666674%}.column.is-12-desktop{flex:none;width:100%}.column.is-offset-12-desktop{margin-left:100%}}@media screen and (min-width: 
1216px){.column.is-narrow-widescreen{flex:none;width:unset}.column.is-full-widescreen{flex:none;width:100%}.column.is-three-quarters-widescreen{flex:none;width:75%}.column.is-two-thirds-widescreen{flex:none;width:66.6666%}.column.is-half-widescreen{flex:none;width:50%}.column.is-one-third-widescreen{flex:none;width:33.3333%}.column.is-one-quarter-widescreen{flex:none;width:25%}.column.is-one-fifth-widescreen{flex:none;width:20%}.column.is-two-fifths-widescreen{flex:none;width:40%}.column.is-three-fifths-widescreen{flex:none;width:60%}.column.is-four-fifths-widescreen{flex:none;width:80%}.column.is-offset-three-quarters-widescreen{margin-left:75%}.column.is-offset-two-thirds-widescreen{margin-left:66.6666%}.column.is-offset-half-widescreen{margin-left:50%}.column.is-offset-one-third-widescreen{margin-left:33.3333%}.column.is-offset-one-quarter-widescreen{margin-left:25%}.column.is-offset-one-fifth-widescreen{margin-left:20%}.column.is-offset-two-fifths-widescreen{margin-left:40%}.column.is-offset-three-fifths-widescreen{margin-left:60%}.column.is-offset-four-fifths-widescreen{margin-left:80%}.column.is-0-widescreen{flex:none;width:0%}.column.is-offset-0-widescreen{margin-left:0%}.column.is-1-widescreen{flex:none;width:8.33333337%}.column.is-offset-1-widescreen{margin-left:8.33333337%}.column.is-2-widescreen{flex:none;width:16.66666674%}.column.is-offset-2-widescreen{margin-left:16.66666674%}.column.is-3-widescreen{flex:none;width:25%}.column.is-offset-3-widescreen{margin-left:25%}.column.is-4-widescreen{flex:none;width:33.33333337%}.column.is-offset-4-widescreen{margin-left:33.33333337%}.column.is-5-widescreen{flex:none;width:41.66666674%}.column.is-offset-5-widescreen{margin-left:41.66666674%}.column.is-6-widescreen{flex:none;width:50%}.column.is-offset-6-widescreen{margin-left:50%}.column.is-7-widescreen{flex:none;width:58.33333337%}.column.is-offset-7-widescreen{margin-left:58.33333337%}.column.is-8-widescreen{flex:none;width:66.66666674%}.column.is-offset-8-widescreen{margin-left:66.66666674%}.column.is-9-widescreen{flex:none;width:75%}.column.is-offset-9-widescreen{margin-left:75%}.column.is-10-widescreen{flex:none;width:83.33333337%}.column.is-offset-10-widescreen{margin-left:83.33333337%}.column.is-11-widescreen{flex:none;width:91.66666674%}.column.is-offset-11-widescreen{margin-left:91.66666674%}.column.is-12-widescreen{flex:none;width:100%}.column.is-offset-12-widescreen{margin-left:100%}}@media screen and (min-width: 
1408px){.column.is-narrow-fullhd{flex:none;width:unset}.column.is-full-fullhd{flex:none;width:100%}.column.is-three-quarters-fullhd{flex:none;width:75%}.column.is-two-thirds-fullhd{flex:none;width:66.6666%}.column.is-half-fullhd{flex:none;width:50%}.column.is-one-third-fullhd{flex:none;width:33.3333%}.column.is-one-quarter-fullhd{flex:none;width:25%}.column.is-one-fifth-fullhd{flex:none;width:20%}.column.is-two-fifths-fullhd{flex:none;width:40%}.column.is-three-fifths-fullhd{flex:none;width:60%}.column.is-four-fifths-fullhd{flex:none;width:80%}.column.is-offset-three-quarters-fullhd{margin-left:75%}.column.is-offset-two-thirds-fullhd{margin-left:66.6666%}.column.is-offset-half-fullhd{margin-left:50%}.column.is-offset-one-third-fullhd{margin-left:33.3333%}.column.is-offset-one-quarter-fullhd{margin-left:25%}.column.is-offset-one-fifth-fullhd{margin-left:20%}.column.is-offset-two-fifths-fullhd{margin-left:40%}.column.is-offset-three-fifths-fullhd{margin-left:60%}.column.is-offset-four-fifths-fullhd{margin-left:80%}.column.is-0-fullhd{flex:none;width:0%}.column.is-offset-0-fullhd{margin-left:0%}.column.is-1-fullhd{flex:none;width:8.33333337%}.column.is-offset-1-fullhd{margin-left:8.33333337%}.column.is-2-fullhd{flex:none;width:16.66666674%}.column.is-offset-2-fullhd{margin-left:16.66666674%}.column.is-3-fullhd{flex:none;width:25%}.column.is-offset-3-fullhd{margin-left:25%}.column.is-4-fullhd{flex:none;width:33.33333337%}.column.is-offset-4-fullhd{margin-left:33.33333337%}.column.is-5-fullhd{flex:none;width:41.66666674%}.column.is-offset-5-fullhd{margin-left:41.66666674%}.column.is-6-fullhd{flex:none;width:50%}.column.is-offset-6-fullhd{margin-left:50%}.column.is-7-fullhd{flex:none;width:58.33333337%}.column.is-offset-7-fullhd{margin-left:58.33333337%}.column.is-8-fullhd{flex:none;width:66.66666674%}.column.is-offset-8-fullhd{margin-left:66.66666674%}.column.is-9-fullhd{flex:none;width:75%}.column.is-offset-9-fullhd{margin-left:75%}.column.is-10-fullhd{flex:none;width:83.33333337%}.column.is-offset-10-fullhd{margin-left:83.33333337%}.column.is-11-fullhd{flex:none;width:91.66666674%}.column.is-offset-11-fullhd{margin-left:91.66666674%}.column.is-12-fullhd{flex:none;width:100%}.column.is-offset-12-fullhd{margin-left:100%}}.columns{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}.columns:last-child{margin-bottom:-.75rem}.columns:not(:last-child){margin-bottom:calc(1.5rem - .75rem)}.columns.is-centered{justify-content:center}.columns.is-gapless{margin-left:0;margin-right:0;margin-top:0}.columns.is-gapless>.column{margin:0;padding:0 !important}.columns.is-gapless:not(:last-child){margin-bottom:1.5rem}.columns.is-gapless:last-child{margin-bottom:0}.columns.is-mobile{display:flex}.columns.is-multiline{flex-wrap:wrap}.columns.is-vcentered{align-items:center}@media screen and (min-width: 769px),print{.columns:not(.is-desktop){display:flex}}@media screen and (min-width: 1056px){.columns.is-desktop{display:flex}}.columns.is-variable{--columnGap: 0.75rem;margin-left:calc(-1 * var(--columnGap));margin-right:calc(-1 * var(--columnGap))}.columns.is-variable>.column{padding-left:var(--columnGap);padding-right:var(--columnGap)}.columns.is-variable.is-0{--columnGap: 0rem}@media screen and (max-width: 768px){.columns.is-variable.is-0-mobile{--columnGap: 0rem}}@media screen and (min-width: 769px),print{.columns.is-variable.is-0-tablet{--columnGap: 0rem}}@media screen and (min-width: 769px) and (max-width: 1055px){.columns.is-variable.is-0-tablet-only{--columnGap: 0rem}}@media screen and (max-width: 
1055px){.columns.is-variable.is-0-touch{--columnGap: 0rem}}@media screen and (min-width: 1056px){.columns.is-variable.is-0-desktop{--columnGap: 0rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){.columns.is-variable.is-0-desktop-only{--columnGap: 0rem}}@media screen and (min-width: 1216px){.columns.is-variable.is-0-widescreen{--columnGap: 0rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){.columns.is-variable.is-0-widescreen-only{--columnGap: 0rem}}@media screen and (min-width: 1408px){.columns.is-variable.is-0-fullhd{--columnGap: 0rem}}.columns.is-variable.is-1{--columnGap: .25rem}@media screen and (max-width: 768px){.columns.is-variable.is-1-mobile{--columnGap: .25rem}}@media screen and (min-width: 769px),print{.columns.is-variable.is-1-tablet{--columnGap: .25rem}}@media screen and (min-width: 769px) and (max-width: 1055px){.columns.is-variable.is-1-tablet-only{--columnGap: .25rem}}@media screen and (max-width: 1055px){.columns.is-variable.is-1-touch{--columnGap: .25rem}}@media screen and (min-width: 1056px){.columns.is-variable.is-1-desktop{--columnGap: .25rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){.columns.is-variable.is-1-desktop-only{--columnGap: .25rem}}@media screen and (min-width: 1216px){.columns.is-variable.is-1-widescreen{--columnGap: .25rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){.columns.is-variable.is-1-widescreen-only{--columnGap: .25rem}}@media screen and (min-width: 1408px){.columns.is-variable.is-1-fullhd{--columnGap: .25rem}}.columns.is-variable.is-2{--columnGap: .5rem}@media screen and (max-width: 768px){.columns.is-variable.is-2-mobile{--columnGap: .5rem}}@media screen and (min-width: 769px),print{.columns.is-variable.is-2-tablet{--columnGap: .5rem}}@media screen and (min-width: 769px) and (max-width: 1055px){.columns.is-variable.is-2-tablet-only{--columnGap: .5rem}}@media screen and (max-width: 1055px){.columns.is-variable.is-2-touch{--columnGap: .5rem}}@media screen and (min-width: 1056px){.columns.is-variable.is-2-desktop{--columnGap: .5rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){.columns.is-variable.is-2-desktop-only{--columnGap: .5rem}}@media screen and (min-width: 1216px){.columns.is-variable.is-2-widescreen{--columnGap: .5rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){.columns.is-variable.is-2-widescreen-only{--columnGap: .5rem}}@media screen and (min-width: 1408px){.columns.is-variable.is-2-fullhd{--columnGap: .5rem}}.columns.is-variable.is-3{--columnGap: .75rem}@media screen and (max-width: 768px){.columns.is-variable.is-3-mobile{--columnGap: .75rem}}@media screen and (min-width: 769px),print{.columns.is-variable.is-3-tablet{--columnGap: .75rem}}@media screen and (min-width: 769px) and (max-width: 1055px){.columns.is-variable.is-3-tablet-only{--columnGap: .75rem}}@media screen and (max-width: 1055px){.columns.is-variable.is-3-touch{--columnGap: .75rem}}@media screen and (min-width: 1056px){.columns.is-variable.is-3-desktop{--columnGap: .75rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){.columns.is-variable.is-3-desktop-only{--columnGap: .75rem}}@media screen and (min-width: 1216px){.columns.is-variable.is-3-widescreen{--columnGap: .75rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){.columns.is-variable.is-3-widescreen-only{--columnGap: .75rem}}@media screen and (min-width: 1408px){.columns.is-variable.is-3-fullhd{--columnGap: .75rem}}.columns.is-variable.is-4{--columnGap: 1rem}@media screen and 
(max-width: 768px){.columns.is-variable.is-4-mobile{--columnGap: 1rem}}@media screen and (min-width: 769px),print{.columns.is-variable.is-4-tablet{--columnGap: 1rem}}@media screen and (min-width: 769px) and (max-width: 1055px){.columns.is-variable.is-4-tablet-only{--columnGap: 1rem}}@media screen and (max-width: 1055px){.columns.is-variable.is-4-touch{--columnGap: 1rem}}@media screen and (min-width: 1056px){.columns.is-variable.is-4-desktop{--columnGap: 1rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){.columns.is-variable.is-4-desktop-only{--columnGap: 1rem}}@media screen and (min-width: 1216px){.columns.is-variable.is-4-widescreen{--columnGap: 1rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){.columns.is-variable.is-4-widescreen-only{--columnGap: 1rem}}@media screen and (min-width: 1408px){.columns.is-variable.is-4-fullhd{--columnGap: 1rem}}.columns.is-variable.is-5{--columnGap: 1.25rem}@media screen and (max-width: 768px){.columns.is-variable.is-5-mobile{--columnGap: 1.25rem}}@media screen and (min-width: 769px),print{.columns.is-variable.is-5-tablet{--columnGap: 1.25rem}}@media screen and (min-width: 769px) and (max-width: 1055px){.columns.is-variable.is-5-tablet-only{--columnGap: 1.25rem}}@media screen and (max-width: 1055px){.columns.is-variable.is-5-touch{--columnGap: 1.25rem}}@media screen and (min-width: 1056px){.columns.is-variable.is-5-desktop{--columnGap: 1.25rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){.columns.is-variable.is-5-desktop-only{--columnGap: 1.25rem}}@media screen and (min-width: 1216px){.columns.is-variable.is-5-widescreen{--columnGap: 1.25rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){.columns.is-variable.is-5-widescreen-only{--columnGap: 1.25rem}}@media screen and (min-width: 1408px){.columns.is-variable.is-5-fullhd{--columnGap: 1.25rem}}.columns.is-variable.is-6{--columnGap: 1.5rem}@media screen and (max-width: 768px){.columns.is-variable.is-6-mobile{--columnGap: 1.5rem}}@media screen and (min-width: 769px),print{.columns.is-variable.is-6-tablet{--columnGap: 1.5rem}}@media screen and (min-width: 769px) and (max-width: 1055px){.columns.is-variable.is-6-tablet-only{--columnGap: 1.5rem}}@media screen and (max-width: 1055px){.columns.is-variable.is-6-touch{--columnGap: 1.5rem}}@media screen and (min-width: 1056px){.columns.is-variable.is-6-desktop{--columnGap: 1.5rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){.columns.is-variable.is-6-desktop-only{--columnGap: 1.5rem}}@media screen and (min-width: 1216px){.columns.is-variable.is-6-widescreen{--columnGap: 1.5rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){.columns.is-variable.is-6-widescreen-only{--columnGap: 1.5rem}}@media screen and (min-width: 1408px){.columns.is-variable.is-6-fullhd{--columnGap: 1.5rem}}.columns.is-variable.is-7{--columnGap: 1.75rem}@media screen and (max-width: 768px){.columns.is-variable.is-7-mobile{--columnGap: 1.75rem}}@media screen and (min-width: 769px),print{.columns.is-variable.is-7-tablet{--columnGap: 1.75rem}}@media screen and (min-width: 769px) and (max-width: 1055px){.columns.is-variable.is-7-tablet-only{--columnGap: 1.75rem}}@media screen and (max-width: 1055px){.columns.is-variable.is-7-touch{--columnGap: 1.75rem}}@media screen and (min-width: 1056px){.columns.is-variable.is-7-desktop{--columnGap: 1.75rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){.columns.is-variable.is-7-desktop-only{--columnGap: 1.75rem}}@media screen and (min-width: 
1216px){.columns.is-variable.is-7-widescreen{--columnGap: 1.75rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){.columns.is-variable.is-7-widescreen-only{--columnGap: 1.75rem}}@media screen and (min-width: 1408px){.columns.is-variable.is-7-fullhd{--columnGap: 1.75rem}}.columns.is-variable.is-8{--columnGap: 2rem}@media screen and (max-width: 768px){.columns.is-variable.is-8-mobile{--columnGap: 2rem}}@media screen and (min-width: 769px),print{.columns.is-variable.is-8-tablet{--columnGap: 2rem}}@media screen and (min-width: 769px) and (max-width: 1055px){.columns.is-variable.is-8-tablet-only{--columnGap: 2rem}}@media screen and (max-width: 1055px){.columns.is-variable.is-8-touch{--columnGap: 2rem}}@media screen and (min-width: 1056px){.columns.is-variable.is-8-desktop{--columnGap: 2rem}}@media screen and (min-width: 1056px) and (max-width: 1215px){.columns.is-variable.is-8-desktop-only{--columnGap: 2rem}}@media screen and (min-width: 1216px){.columns.is-variable.is-8-widescreen{--columnGap: 2rem}}@media screen and (min-width: 1216px) and (max-width: 1407px){.columns.is-variable.is-8-widescreen-only{--columnGap: 2rem}}@media screen and (min-width: 1408px){.columns.is-variable.is-8-fullhd{--columnGap: 2rem}}.tile{align-items:stretch;display:block;flex-basis:0;flex-grow:1;flex-shrink:1;min-height:min-content}.tile.is-ancestor{margin-left:-.75rem;margin-right:-.75rem;margin-top:-.75rem}.tile.is-ancestor:last-child{margin-bottom:-.75rem}.tile.is-ancestor:not(:last-child){margin-bottom:.75rem}.tile.is-child{margin:0 !important}.tile.is-parent{padding:.75rem}.tile.is-vertical{flex-direction:column}.tile.is-vertical>.tile.is-child:not(:last-child){margin-bottom:1.5rem !important}@media screen and (min-width: 769px),print{.tile:not(.is-child){display:flex}.tile.is-1{flex:none;width:8.33333337%}.tile.is-2{flex:none;width:16.66666674%}.tile.is-3{flex:none;width:25%}.tile.is-4{flex:none;width:33.33333337%}.tile.is-5{flex:none;width:41.66666674%}.tile.is-6{flex:none;width:50%}.tile.is-7{flex:none;width:58.33333337%}.tile.is-8{flex:none;width:66.66666674%}.tile.is-9{flex:none;width:75%}.tile.is-10{flex:none;width:83.33333337%}.tile.is-11{flex:none;width:91.66666674%}.tile.is-12{flex:none;width:100%}}.hero{align-items:stretch;display:flex;flex-direction:column;justify-content:space-between}.hero .navbar{background:none}.hero .tabs ul{border-bottom:none}.hero.is-white{background-color:#fff;color:#0a0a0a}.hero.is-white a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-white strong{color:inherit}.hero.is-white .title{color:#0a0a0a}.hero.is-white .subtitle{color:rgba(10,10,10,0.9)}.hero.is-white .subtitle a:not(.button),.hero.is-white .subtitle strong{color:#0a0a0a}@media screen and (max-width: 1055px){.hero.is-white .navbar-menu{background-color:#fff}}.hero.is-white .navbar-item,.hero.is-white .navbar-link{color:rgba(10,10,10,0.7)}.hero.is-white a.navbar-item:hover,.hero.is-white a.navbar-item.is-active,.hero.is-white .navbar-link:hover,.hero.is-white .navbar-link.is-active{background-color:#f2f2f2;color:#0a0a0a}.hero.is-white .tabs a{color:#0a0a0a;opacity:0.9}.hero.is-white .tabs a:hover{opacity:1}.hero.is-white .tabs li.is-active a{color:#fff !important;opacity:1}.hero.is-white .tabs.is-boxed a,.hero.is-white .tabs.is-toggle a{color:#0a0a0a}.hero.is-white .tabs.is-boxed a:hover,.hero.is-white .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-white .tabs.is-boxed li.is-active a,.hero.is-white .tabs.is-boxed li.is-active 
a:hover,.hero.is-white .tabs.is-toggle li.is-active a,.hero.is-white .tabs.is-toggle li.is-active a:hover{background-color:#0a0a0a;border-color:#0a0a0a;color:#fff}.hero.is-white.is-bold{background-image:linear-gradient(141deg, #e8e3e4 0%, #fff 71%, #fff 100%)}@media screen and (max-width: 768px){.hero.is-white.is-bold .navbar-menu{background-image:linear-gradient(141deg, #e8e3e4 0%, #fff 71%, #fff 100%)}}.hero.is-black{background-color:#0a0a0a;color:#fff}.hero.is-black a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-black strong{color:inherit}.hero.is-black .title{color:#fff}.hero.is-black .subtitle{color:rgba(255,255,255,0.9)}.hero.is-black .subtitle a:not(.button),.hero.is-black .subtitle strong{color:#fff}@media screen and (max-width: 1055px){.hero.is-black .navbar-menu{background-color:#0a0a0a}}.hero.is-black .navbar-item,.hero.is-black .navbar-link{color:rgba(255,255,255,0.7)}.hero.is-black a.navbar-item:hover,.hero.is-black a.navbar-item.is-active,.hero.is-black .navbar-link:hover,.hero.is-black .navbar-link.is-active{background-color:#000;color:#fff}.hero.is-black .tabs a{color:#fff;opacity:0.9}.hero.is-black .tabs a:hover{opacity:1}.hero.is-black .tabs li.is-active a{color:#0a0a0a !important;opacity:1}.hero.is-black .tabs.is-boxed a,.hero.is-black .tabs.is-toggle a{color:#fff}.hero.is-black .tabs.is-boxed a:hover,.hero.is-black .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-black .tabs.is-boxed li.is-active a,.hero.is-black .tabs.is-boxed li.is-active a:hover,.hero.is-black .tabs.is-toggle li.is-active a,.hero.is-black .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#0a0a0a}.hero.is-black.is-bold{background-image:linear-gradient(141deg, #000 0%, #0a0a0a 71%, #181616 100%)}@media screen and (max-width: 768px){.hero.is-black.is-bold .navbar-menu{background-image:linear-gradient(141deg, #000 0%, #0a0a0a 71%, #181616 100%)}}.hero.is-light{background-color:#f5f5f5;color:rgba(0,0,0,0.7)}.hero.is-light a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-light strong{color:inherit}.hero.is-light .title{color:rgba(0,0,0,0.7)}.hero.is-light .subtitle{color:rgba(0,0,0,0.9)}.hero.is-light .subtitle a:not(.button),.hero.is-light .subtitle strong{color:rgba(0,0,0,0.7)}@media screen and (max-width: 1055px){.hero.is-light .navbar-menu{background-color:#f5f5f5}}.hero.is-light .navbar-item,.hero.is-light .navbar-link{color:rgba(0,0,0,0.7)}.hero.is-light a.navbar-item:hover,.hero.is-light a.navbar-item.is-active,.hero.is-light .navbar-link:hover,.hero.is-light .navbar-link.is-active{background-color:#e8e8e8;color:rgba(0,0,0,0.7)}.hero.is-light .tabs a{color:rgba(0,0,0,0.7);opacity:0.9}.hero.is-light .tabs a:hover{opacity:1}.hero.is-light .tabs li.is-active a{color:#f5f5f5 !important;opacity:1}.hero.is-light .tabs.is-boxed a,.hero.is-light .tabs.is-toggle a{color:rgba(0,0,0,0.7)}.hero.is-light .tabs.is-boxed a:hover,.hero.is-light .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-light .tabs.is-boxed li.is-active a,.hero.is-light .tabs.is-boxed li.is-active a:hover,.hero.is-light .tabs.is-toggle li.is-active a,.hero.is-light .tabs.is-toggle li.is-active a:hover{background-color:rgba(0,0,0,0.7);border-color:rgba(0,0,0,0.7);color:#f5f5f5}.hero.is-light.is-bold{background-image:linear-gradient(141deg, #dfd8d9 0%, #f5f5f5 71%, #fff 100%)}@media screen and (max-width: 768px){.hero.is-light.is-bold .navbar-menu{background-image:linear-gradient(141deg, #dfd8d9 
0%, #f5f5f5 71%, #fff 100%)}}.hero.is-dark,.content kbd.hero{background-color:#363636;color:#fff}.hero.is-dark a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.content kbd.hero a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-dark strong,.content kbd.hero strong{color:inherit}.hero.is-dark .title,.content kbd.hero .title{color:#fff}.hero.is-dark .subtitle,.content kbd.hero .subtitle{color:rgba(255,255,255,0.9)}.hero.is-dark .subtitle a:not(.button),.content kbd.hero .subtitle a:not(.button),.hero.is-dark .subtitle strong,.content kbd.hero .subtitle strong{color:#fff}@media screen and (max-width: 1055px){.hero.is-dark .navbar-menu,.content kbd.hero .navbar-menu{background-color:#363636}}.hero.is-dark .navbar-item,.content kbd.hero .navbar-item,.hero.is-dark .navbar-link,.content kbd.hero .navbar-link{color:rgba(255,255,255,0.7)}.hero.is-dark a.navbar-item:hover,.content kbd.hero a.navbar-item:hover,.hero.is-dark a.navbar-item.is-active,.content kbd.hero a.navbar-item.is-active,.hero.is-dark .navbar-link:hover,.content kbd.hero .navbar-link:hover,.hero.is-dark .navbar-link.is-active,.content kbd.hero .navbar-link.is-active{background-color:#292929;color:#fff}.hero.is-dark .tabs a,.content kbd.hero .tabs a{color:#fff;opacity:0.9}.hero.is-dark .tabs a:hover,.content kbd.hero .tabs a:hover{opacity:1}.hero.is-dark .tabs li.is-active a,.content kbd.hero .tabs li.is-active a{color:#363636 !important;opacity:1}.hero.is-dark .tabs.is-boxed a,.content kbd.hero .tabs.is-boxed a,.hero.is-dark .tabs.is-toggle a,.content kbd.hero .tabs.is-toggle a{color:#fff}.hero.is-dark .tabs.is-boxed a:hover,.content kbd.hero .tabs.is-boxed a:hover,.hero.is-dark .tabs.is-toggle a:hover,.content kbd.hero .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-dark .tabs.is-boxed li.is-active a,.content kbd.hero .tabs.is-boxed li.is-active a,.hero.is-dark .tabs.is-boxed li.is-active a:hover,.hero.is-dark .tabs.is-toggle li.is-active a,.content kbd.hero .tabs.is-toggle li.is-active a,.hero.is-dark .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#363636}.hero.is-dark.is-bold,.content kbd.hero.is-bold{background-image:linear-gradient(141deg, #1f191a 0%, #363636 71%, #46403f 100%)}@media screen and (max-width: 768px){.hero.is-dark.is-bold .navbar-menu,.content kbd.hero.is-bold .navbar-menu{background-image:linear-gradient(141deg, #1f191a 0%, #363636 71%, #46403f 100%)}}.hero.is-primary,.docstring>section>a.hero.docs-sourcelink{background-color:#4eb5de;color:#fff}.hero.is-primary a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.docstring>section>a.hero.docs-sourcelink a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-primary strong,.docstring>section>a.hero.docs-sourcelink strong{color:inherit}.hero.is-primary .title,.docstring>section>a.hero.docs-sourcelink .title{color:#fff}.hero.is-primary .subtitle,.docstring>section>a.hero.docs-sourcelink .subtitle{color:rgba(255,255,255,0.9)}.hero.is-primary .subtitle a:not(.button),.docstring>section>a.hero.docs-sourcelink .subtitle a:not(.button),.hero.is-primary .subtitle strong,.docstring>section>a.hero.docs-sourcelink .subtitle strong{color:#fff}@media screen and (max-width: 1055px){.hero.is-primary .navbar-menu,.docstring>section>a.hero.docs-sourcelink .navbar-menu{background-color:#4eb5de}}.hero.is-primary .navbar-item,.docstring>section>a.hero.docs-sourcelink .navbar-item,.hero.is-primary 
.navbar-link,.docstring>section>a.hero.docs-sourcelink .navbar-link{color:rgba(255,255,255,0.7)}.hero.is-primary a.navbar-item:hover,.docstring>section>a.hero.docs-sourcelink a.navbar-item:hover,.hero.is-primary a.navbar-item.is-active,.docstring>section>a.hero.docs-sourcelink a.navbar-item.is-active,.hero.is-primary .navbar-link:hover,.docstring>section>a.hero.docs-sourcelink .navbar-link:hover,.hero.is-primary .navbar-link.is-active,.docstring>section>a.hero.docs-sourcelink .navbar-link.is-active{background-color:#39acda;color:#fff}.hero.is-primary .tabs a,.docstring>section>a.hero.docs-sourcelink .tabs a{color:#fff;opacity:0.9}.hero.is-primary .tabs a:hover,.docstring>section>a.hero.docs-sourcelink .tabs a:hover{opacity:1}.hero.is-primary .tabs li.is-active a,.docstring>section>a.hero.docs-sourcelink .tabs li.is-active a{color:#4eb5de !important;opacity:1}.hero.is-primary .tabs.is-boxed a,.docstring>section>a.hero.docs-sourcelink .tabs.is-boxed a,.hero.is-primary .tabs.is-toggle a,.docstring>section>a.hero.docs-sourcelink .tabs.is-toggle a{color:#fff}.hero.is-primary .tabs.is-boxed a:hover,.docstring>section>a.hero.docs-sourcelink .tabs.is-boxed a:hover,.hero.is-primary .tabs.is-toggle a:hover,.docstring>section>a.hero.docs-sourcelink .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-primary .tabs.is-boxed li.is-active a,.docstring>section>a.hero.docs-sourcelink .tabs.is-boxed li.is-active a,.hero.is-primary .tabs.is-boxed li.is-active a:hover,.hero.is-primary .tabs.is-toggle li.is-active a,.docstring>section>a.hero.docs-sourcelink .tabs.is-toggle li.is-active a,.hero.is-primary .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#4eb5de}.hero.is-primary.is-bold,.docstring>section>a.hero.is-bold.docs-sourcelink{background-image:linear-gradient(141deg, #1bc7de 0%, #4eb5de 71%, #5fa9e7 100%)}@media screen and (max-width: 768px){.hero.is-primary.is-bold .navbar-menu,.docstring>section>a.hero.is-bold.docs-sourcelink .navbar-menu{background-image:linear-gradient(141deg, #1bc7de 0%, #4eb5de 71%, #5fa9e7 100%)}}.hero.is-link{background-color:#2e63b8;color:#fff}.hero.is-link a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-link strong{color:inherit}.hero.is-link .title{color:#fff}.hero.is-link .subtitle{color:rgba(255,255,255,0.9)}.hero.is-link .subtitle a:not(.button),.hero.is-link .subtitle strong{color:#fff}@media screen and (max-width: 1055px){.hero.is-link .navbar-menu{background-color:#2e63b8}}.hero.is-link .navbar-item,.hero.is-link .navbar-link{color:rgba(255,255,255,0.7)}.hero.is-link a.navbar-item:hover,.hero.is-link a.navbar-item.is-active,.hero.is-link .navbar-link:hover,.hero.is-link .navbar-link.is-active{background-color:#2958a4;color:#fff}.hero.is-link .tabs a{color:#fff;opacity:0.9}.hero.is-link .tabs a:hover{opacity:1}.hero.is-link .tabs li.is-active a{color:#2e63b8 !important;opacity:1}.hero.is-link .tabs.is-boxed a,.hero.is-link .tabs.is-toggle a{color:#fff}.hero.is-link .tabs.is-boxed a:hover,.hero.is-link .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-link .tabs.is-boxed li.is-active a,.hero.is-link .tabs.is-boxed li.is-active a:hover,.hero.is-link .tabs.is-toggle li.is-active a,.hero.is-link .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#2e63b8}.hero.is-link.is-bold{background-image:linear-gradient(141deg, #1b6098 0%, #2e63b8 71%, #2d51d2 100%)}@media screen and (max-width: 768px){.hero.is-link.is-bold 
.navbar-menu{background-image:linear-gradient(141deg, #1b6098 0%, #2e63b8 71%, #2d51d2 100%)}}.hero.is-info{background-color:#209cee;color:#fff}.hero.is-info a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-info strong{color:inherit}.hero.is-info .title{color:#fff}.hero.is-info .subtitle{color:rgba(255,255,255,0.9)}.hero.is-info .subtitle a:not(.button),.hero.is-info .subtitle strong{color:#fff}@media screen and (max-width: 1055px){.hero.is-info .navbar-menu{background-color:#209cee}}.hero.is-info .navbar-item,.hero.is-info .navbar-link{color:rgba(255,255,255,0.7)}.hero.is-info a.navbar-item:hover,.hero.is-info a.navbar-item.is-active,.hero.is-info .navbar-link:hover,.hero.is-info .navbar-link.is-active{background-color:#1190e3;color:#fff}.hero.is-info .tabs a{color:#fff;opacity:0.9}.hero.is-info .tabs a:hover{opacity:1}.hero.is-info .tabs li.is-active a{color:#209cee !important;opacity:1}.hero.is-info .tabs.is-boxed a,.hero.is-info .tabs.is-toggle a{color:#fff}.hero.is-info .tabs.is-boxed a:hover,.hero.is-info .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-info .tabs.is-boxed li.is-active a,.hero.is-info .tabs.is-boxed li.is-active a:hover,.hero.is-info .tabs.is-toggle li.is-active a,.hero.is-info .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#209cee}.hero.is-info.is-bold{background-image:linear-gradient(141deg, #05a6d6 0%, #209cee 71%, #3287f5 100%)}@media screen and (max-width: 768px){.hero.is-info.is-bold .navbar-menu{background-image:linear-gradient(141deg, #05a6d6 0%, #209cee 71%, #3287f5 100%)}}.hero.is-success{background-color:#22c35b;color:#fff}.hero.is-success a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-success strong{color:inherit}.hero.is-success .title{color:#fff}.hero.is-success .subtitle{color:rgba(255,255,255,0.9)}.hero.is-success .subtitle a:not(.button),.hero.is-success .subtitle strong{color:#fff}@media screen and (max-width: 1055px){.hero.is-success .navbar-menu{background-color:#22c35b}}.hero.is-success .navbar-item,.hero.is-success .navbar-link{color:rgba(255,255,255,0.7)}.hero.is-success a.navbar-item:hover,.hero.is-success a.navbar-item.is-active,.hero.is-success .navbar-link:hover,.hero.is-success .navbar-link.is-active{background-color:#1ead51;color:#fff}.hero.is-success .tabs a{color:#fff;opacity:0.9}.hero.is-success .tabs a:hover{opacity:1}.hero.is-success .tabs li.is-active a{color:#22c35b !important;opacity:1}.hero.is-success .tabs.is-boxed a,.hero.is-success .tabs.is-toggle a{color:#fff}.hero.is-success .tabs.is-boxed a:hover,.hero.is-success .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-success .tabs.is-boxed li.is-active a,.hero.is-success .tabs.is-boxed li.is-active a:hover,.hero.is-success .tabs.is-toggle li.is-active a,.hero.is-success .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#22c35b}.hero.is-success.is-bold{background-image:linear-gradient(141deg, #12a02c 0%, #22c35b 71%, #1fdf83 100%)}@media screen and (max-width: 768px){.hero.is-success.is-bold .navbar-menu{background-image:linear-gradient(141deg, #12a02c 0%, #22c35b 71%, #1fdf83 100%)}}.hero.is-warning{background-color:#ffdd57;color:rgba(0,0,0,0.7)}.hero.is-warning a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-warning strong{color:inherit}.hero.is-warning .title{color:rgba(0,0,0,0.7)}.hero.is-warning .subtitle{color:rgba(0,0,0,0.9)}.hero.is-warning .subtitle 
a:not(.button),.hero.is-warning .subtitle strong{color:rgba(0,0,0,0.7)}@media screen and (max-width: 1055px){.hero.is-warning .navbar-menu{background-color:#ffdd57}}.hero.is-warning .navbar-item,.hero.is-warning .navbar-link{color:rgba(0,0,0,0.7)}.hero.is-warning a.navbar-item:hover,.hero.is-warning a.navbar-item.is-active,.hero.is-warning .navbar-link:hover,.hero.is-warning .navbar-link.is-active{background-color:#ffd83e;color:rgba(0,0,0,0.7)}.hero.is-warning .tabs a{color:rgba(0,0,0,0.7);opacity:0.9}.hero.is-warning .tabs a:hover{opacity:1}.hero.is-warning .tabs li.is-active a{color:#ffdd57 !important;opacity:1}.hero.is-warning .tabs.is-boxed a,.hero.is-warning .tabs.is-toggle a{color:rgba(0,0,0,0.7)}.hero.is-warning .tabs.is-boxed a:hover,.hero.is-warning .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-warning .tabs.is-boxed li.is-active a,.hero.is-warning .tabs.is-boxed li.is-active a:hover,.hero.is-warning .tabs.is-toggle li.is-active a,.hero.is-warning .tabs.is-toggle li.is-active a:hover{background-color:rgba(0,0,0,0.7);border-color:rgba(0,0,0,0.7);color:#ffdd57}.hero.is-warning.is-bold{background-image:linear-gradient(141deg, #ffae24 0%, #ffdd57 71%, #fffa71 100%)}@media screen and (max-width: 768px){.hero.is-warning.is-bold .navbar-menu{background-image:linear-gradient(141deg, #ffae24 0%, #ffdd57 71%, #fffa71 100%)}}.hero.is-danger{background-color:#da0b00;color:#fff}.hero.is-danger a:not(.button):not(.dropdown-item):not(.tag):not(.pagination-link.is-current),.hero.is-danger strong{color:inherit}.hero.is-danger .title{color:#fff}.hero.is-danger .subtitle{color:rgba(255,255,255,0.9)}.hero.is-danger .subtitle a:not(.button),.hero.is-danger .subtitle strong{color:#fff}@media screen and (max-width: 1055px){.hero.is-danger .navbar-menu{background-color:#da0b00}}.hero.is-danger .navbar-item,.hero.is-danger .navbar-link{color:rgba(255,255,255,0.7)}.hero.is-danger a.navbar-item:hover,.hero.is-danger a.navbar-item.is-active,.hero.is-danger .navbar-link:hover,.hero.is-danger .navbar-link.is-active{background-color:#c10a00;color:#fff}.hero.is-danger .tabs a{color:#fff;opacity:0.9}.hero.is-danger .tabs a:hover{opacity:1}.hero.is-danger .tabs li.is-active a{color:#da0b00 !important;opacity:1}.hero.is-danger .tabs.is-boxed a,.hero.is-danger .tabs.is-toggle a{color:#fff}.hero.is-danger .tabs.is-boxed a:hover,.hero.is-danger .tabs.is-toggle a:hover{background-color:rgba(10,10,10,0.1)}.hero.is-danger .tabs.is-boxed li.is-active a,.hero.is-danger .tabs.is-boxed li.is-active a:hover,.hero.is-danger .tabs.is-toggle li.is-active a,.hero.is-danger .tabs.is-toggle li.is-active a:hover{background-color:#fff;border-color:#fff;color:#da0b00}.hero.is-danger.is-bold{background-image:linear-gradient(141deg, #a70013 0%, #da0b00 71%, #f43500 100%)}@media screen and (max-width: 768px){.hero.is-danger.is-bold .navbar-menu{background-image:linear-gradient(141deg, #a70013 0%, #da0b00 71%, #f43500 100%)}}.hero.is-small .hero-body,#documenter .docs-sidebar form.docs-search>input.hero .hero-body{padding:1.5rem}@media screen and (min-width: 769px),print{.hero.is-medium .hero-body{padding:9rem 4.5rem}}@media screen and (min-width: 769px),print{.hero.is-large .hero-body{padding:18rem 6rem}}.hero.is-halfheight .hero-body,.hero.is-fullheight .hero-body,.hero.is-fullheight-with-navbar .hero-body{align-items:center;display:flex}.hero.is-halfheight .hero-body>.container,.hero.is-fullheight .hero-body>.container,.hero.is-fullheight-with-navbar 
.hero-body>.container{flex-grow:1;flex-shrink:1}.hero.is-halfheight{min-height:50vh}.hero.is-fullheight{min-height:100vh}.hero-video{overflow:hidden}.hero-video video{left:50%;min-height:100%;min-width:100%;position:absolute;top:50%;transform:translate3d(-50%, -50%, 0)}.hero-video.is-transparent{opacity:0.3}@media screen and (max-width: 768px){.hero-video{display:none}}.hero-buttons{margin-top:1.5rem}@media screen and (max-width: 768px){.hero-buttons .button{display:flex}.hero-buttons .button:not(:last-child){margin-bottom:0.75rem}}@media screen and (min-width: 769px),print{.hero-buttons{display:flex;justify-content:center}.hero-buttons .button:not(:last-child){margin-right:1.5rem}}.hero-head,.hero-foot{flex-grow:0;flex-shrink:0}.hero-body{flex-grow:1;flex-shrink:0;padding:3rem 1.5rem}@media screen and (min-width: 769px),print{.hero-body{padding:3rem 3rem}}.section{padding:3rem 1.5rem}@media screen and (min-width: 1056px){.section{padding:3rem 3rem}.section.is-medium{padding:9rem 4.5rem}.section.is-large{padding:18rem 6rem}}.footer{background-color:#fafafa;padding:3rem 1.5rem 6rem}h1 .docs-heading-anchor,h1 .docs-heading-anchor:hover,h1 .docs-heading-anchor:visited,h2 .docs-heading-anchor,h2 .docs-heading-anchor:hover,h2 .docs-heading-anchor:visited,h3 .docs-heading-anchor,h3 .docs-heading-anchor:hover,h3 .docs-heading-anchor:visited,h4 .docs-heading-anchor,h4 .docs-heading-anchor:hover,h4 .docs-heading-anchor:visited,h5 .docs-heading-anchor,h5 .docs-heading-anchor:hover,h5 .docs-heading-anchor:visited,h6 .docs-heading-anchor,h6 .docs-heading-anchor:hover,h6 .docs-heading-anchor:visited{color:#222}h1 .docs-heading-anchor-permalink,h2 .docs-heading-anchor-permalink,h3 .docs-heading-anchor-permalink,h4 .docs-heading-anchor-permalink,h5 .docs-heading-anchor-permalink,h6 .docs-heading-anchor-permalink{visibility:hidden;vertical-align:middle;margin-left:0.5em;font-size:0.7rem}h1 .docs-heading-anchor-permalink::before,h2 .docs-heading-anchor-permalink::before,h3 .docs-heading-anchor-permalink::before,h4 .docs-heading-anchor-permalink::before,h5 .docs-heading-anchor-permalink::before,h6 .docs-heading-anchor-permalink::before{font-family:"Font Awesome 6 Free";font-weight:900;content:"\f0c1"}h1:hover .docs-heading-anchor-permalink,h2:hover .docs-heading-anchor-permalink,h3:hover .docs-heading-anchor-permalink,h4:hover .docs-heading-anchor-permalink,h5:hover .docs-heading-anchor-permalink,h6:hover .docs-heading-anchor-permalink{visibility:visible}.docs-dark-only{display:none !important}pre{position:relative;overflow:hidden}pre code,pre code.hljs{padding:0 .75rem !important;overflow:auto;display:block}pre code:first-of-type,pre code.hljs:first-of-type{padding-top:0.5rem !important}pre code:last-of-type,pre code.hljs:last-of-type{padding-bottom:0.5rem !important}pre .copy-button{opacity:0.2;transition:opacity 0.2s;position:absolute;right:0em;top:0em;padding:0.5em;width:2.5em;height:2.5em;background:transparent;border:none;font-family:"Font Awesome 6 Free";color:#222;cursor:pointer;text-align:center}pre .copy-button:focus,pre .copy-button:hover{opacity:1;background:rgba(34,34,34,0.1);color:#2e63b8}pre .copy-button.success{color:#259a12;opacity:1}pre .copy-button.error{color:#cb3c33;opacity:1}pre:hover .copy-button{opacity:1}.admonition{background-color:#b5b5b5;border-style:solid;border-width:1px;border-color:#363636;border-radius:4px;font-size:1rem}.admonition strong{color:currentColor}.admonition.is-small,#documenter .docs-sidebar 
form.docs-search>input.admonition{font-size:.75rem}.admonition.is-medium{font-size:1.25rem}.admonition.is-large{font-size:1.5rem}.admonition.is-default{background-color:#b5b5b5;border-color:#363636}.admonition.is-default>.admonition-header{background-color:#363636;color:#fff}.admonition.is-default>.admonition-body{color:#fff}.admonition.is-info{background-color:#def0fc;border-color:#209cee}.admonition.is-info>.admonition-header{background-color:#209cee;color:#fff}.admonition.is-info>.admonition-body{color:rgba(0,0,0,0.7)}.admonition.is-success{background-color:#bdf4d1;border-color:#22c35b}.admonition.is-success>.admonition-header{background-color:#22c35b;color:#fff}.admonition.is-success>.admonition-body{color:rgba(0,0,0,0.7)}.admonition.is-warning{background-color:#fff3c5;border-color:#ffdd57}.admonition.is-warning>.admonition-header{background-color:#ffdd57;color:rgba(0,0,0,0.7)}.admonition.is-warning>.admonition-body{color:rgba(0,0,0,0.7)}.admonition.is-danger{background-color:#ffaba7;border-color:#da0b00}.admonition.is-danger>.admonition-header{background-color:#da0b00;color:#fff}.admonition.is-danger>.admonition-body{color:rgba(0,0,0,0.7)}.admonition.is-compat{background-color:#bdeff5;border-color:#1db5c9}.admonition.is-compat>.admonition-header{background-color:#1db5c9;color:#fff}.admonition.is-compat>.admonition-body{color:rgba(0,0,0,0.7)}.admonition-header{color:#fff;background-color:#363636;align-items:center;font-weight:700;justify-content:space-between;line-height:1.25;padding:0.5rem .75rem;position:relative}.admonition-header:before{font-family:"Font Awesome 6 Free";font-weight:900;margin-right:.75rem;content:"\f06a"}details.admonition.is-details>.admonition-header{list-style:none}details.admonition.is-details>.admonition-header:before{font-family:"Font Awesome 6 Free";font-weight:900;content:"\f055"}details.admonition.is-details[open]>.admonition-header:before{font-family:"Font Awesome 6 Free";font-weight:900;content:"\f056"}.admonition-body{color:#222;padding:0.5rem .75rem}.admonition-body pre{background-color:#f5f5f5}.admonition-body code{background-color:rgba(0,0,0,0.05)}.docstring{margin-bottom:1em;background-color:rgba(0,0,0,0);border:1px solid #dbdbdb;box-shadow:2px 2px 3px rgba(10,10,10,0.1);max-width:100%}.docstring>header{cursor:pointer;display:flex;flex-grow:1;align-items:stretch;padding:0.5rem .75rem;background-color:#f5f5f5;box-shadow:0 0.125em 0.25em rgba(10,10,10,0.1);box-shadow:none;border-bottom:1px solid #dbdbdb;overflow:auto}.docstring>header code{background-color:transparent}.docstring>header .docstring-article-toggle-button{min-width:1.1rem;padding:0.2rem 0.2rem 0.2rem 0}.docstring>header .docstring-binding{margin-right:0.3em}.docstring>header .docstring-category{margin-left:0.3em}.docstring>section{position:relative;padding:.75rem .75rem;border-bottom:1px solid #dbdbdb}.docstring>section:last-child{border-bottom:none}.docstring>section>a.docs-sourcelink{transition:opacity 0.3s;opacity:0;position:absolute;right:.375rem;bottom:.375rem}.docstring>section>a.docs-sourcelink:focus{opacity:1 !important}.docstring:hover>section>a.docs-sourcelink{opacity:0.2}.docstring:focus-within>section>a.docs-sourcelink{opacity:0.2}.docstring>section:hover a.docs-sourcelink{opacity:1}.documenter-example-output{background-color:#fff}.outdated-warning-overlay{position:fixed;top:0;left:0;right:0;box-shadow:0 0 10px rgba(0,0,0,0.3);z-index:999;background-color:#ffaba7;color:rgba(0,0,0,0.7);border-bottom:3px solid #da0b00;padding:10px 
35px;text-align:center;font-size:15px}.outdated-warning-overlay .outdated-warning-closer{position:absolute;top:calc(50% - 10px);right:18px;cursor:pointer;width:12px}.outdated-warning-overlay a{color:#2e63b8}.outdated-warning-overlay a:hover{color:#363636}.content pre{border:1px solid #dbdbdb}.content code{font-weight:inherit}.content a code{color:#2e63b8}.content h1 code,.content h2 code,.content h3 code,.content h4 code,.content h5 code,.content h6 code{color:#222}.content table{display:block;width:initial;max-width:100%;overflow-x:auto}.content blockquote>ul:first-child,.content blockquote>ol:first-child,.content .admonition-body>ul:first-child,.content .admonition-body>ol:first-child{margin-top:0}pre,code{font-variant-ligatures:no-contextual}.breadcrumb a.is-disabled{cursor:default;pointer-events:none}.breadcrumb a.is-disabled,.breadcrumb a.is-disabled:hover{color:#222}.hljs{background:initial !important}.katex .katex-mathml{top:0;right:0}.katex-display,mjx-container,.MathJax_Display{margin:0.5em 0 !important}html{-moz-osx-font-smoothing:auto;-webkit-font-smoothing:auto}li.no-marker{list-style:none}#documenter .docs-main>article{overflow-wrap:break-word}#documenter .docs-main>article .math-container{overflow-x:auto;overflow-y:hidden}@media screen and (min-width: 1056px){#documenter .docs-main{max-width:52rem;margin-left:20rem;padding-right:1rem}}@media screen and (max-width: 1055px){#documenter .docs-main{width:100%}#documenter .docs-main>article{max-width:52rem;margin-left:auto;margin-right:auto;margin-bottom:1rem;padding:0 1rem}#documenter .docs-main>header,#documenter .docs-main>nav{max-width:100%;width:100%;margin:0}}#documenter .docs-main header.docs-navbar{background-color:#fff;border-bottom:1px solid #dbdbdb;z-index:2;min-height:4rem;margin-bottom:1rem;display:flex}#documenter .docs-main header.docs-navbar .breadcrumb{flex-grow:1;overflow-x:hidden}#documenter .docs-main header.docs-navbar .docs-sidebar-button{display:block;font-size:1.5rem;padding-bottom:0.1rem;margin-right:1rem}#documenter .docs-main header.docs-navbar .docs-right{display:flex;white-space:nowrap;gap:1rem;align-items:center}#documenter .docs-main header.docs-navbar .docs-right .docs-icon,#documenter .docs-main header.docs-navbar .docs-right .docs-label{display:inline-block}#documenter .docs-main header.docs-navbar .docs-right .docs-label{padding:0;margin-left:0.3em}@media screen and (max-width: 1055px){#documenter .docs-main header.docs-navbar .docs-right .docs-navbar-link{margin-left:0.4rem;margin-right:0.4rem}}#documenter .docs-main header.docs-navbar>*{margin:auto 0}@media screen and (max-width: 1055px){#documenter .docs-main header.docs-navbar{position:sticky;top:0;padding:0 1rem;transition-property:top, box-shadow;-webkit-transition-property:top, box-shadow;transition-duration:0.3s;-webkit-transition-duration:0.3s}#documenter .docs-main header.docs-navbar.headroom--not-top{box-shadow:.2rem 0rem .4rem #bbb;transition-duration:0.7s;-webkit-transition-duration:0.7s}#documenter .docs-main header.docs-navbar.headroom--unpinned.headroom--not-top.headroom--not-bottom{top:-4.5rem;transition-duration:0.7s;-webkit-transition-duration:0.7s}}#documenter .docs-main section.footnotes{border-top:1px solid #dbdbdb}#documenter .docs-main section.footnotes li .tag:first-child,#documenter .docs-main section.footnotes li .docstring>section>a.docs-sourcelink:first-child,#documenter .docs-main section.footnotes li .content kbd:first-child,.content #documenter .docs-main section.footnotes li 
kbd:first-child{margin-right:1em;margin-bottom:0.4em}#documenter .docs-main .docs-footer{display:flex;flex-wrap:wrap;margin-left:0;margin-right:0;border-top:1px solid #dbdbdb;padding-top:1rem;padding-bottom:1rem}@media screen and (max-width: 1055px){#documenter .docs-main .docs-footer{padding-left:1rem;padding-right:1rem}}#documenter .docs-main .docs-footer .docs-footer-nextpage,#documenter .docs-main .docs-footer .docs-footer-prevpage{flex-grow:1}#documenter .docs-main .docs-footer .docs-footer-nextpage{text-align:right}#documenter .docs-main .docs-footer .flexbox-break{flex-basis:100%;height:0}#documenter .docs-main .docs-footer .footer-message{font-size:0.8em;margin:0.5em auto 0 auto;text-align:center}#documenter .docs-sidebar{display:flex;flex-direction:column;color:#0a0a0a;background-color:#f5f5f5;border-right:1px solid #dbdbdb;padding:0;flex:0 0 18rem;z-index:5;font-size:1rem;position:fixed;left:-18rem;width:18rem;height:100%;transition:left 0.3s}#documenter .docs-sidebar.visible{left:0;box-shadow:.4rem 0rem .8rem #bbb}@media screen and (min-width: 1056px){#documenter .docs-sidebar.visible{box-shadow:none}}@media screen and (min-width: 1056px){#documenter .docs-sidebar{left:0;top:0}}#documenter .docs-sidebar .docs-logo{margin-top:1rem;padding:0 1rem}#documenter .docs-sidebar .docs-logo>img{max-height:6rem;margin:auto}#documenter .docs-sidebar .docs-package-name{flex-shrink:0;font-size:1.5rem;font-weight:700;text-align:center;white-space:nowrap;overflow:hidden;padding:0.5rem 0}#documenter .docs-sidebar .docs-package-name .docs-autofit{max-width:16.2rem}#documenter .docs-sidebar .docs-package-name a,#documenter .docs-sidebar .docs-package-name a:hover{color:#0a0a0a}#documenter .docs-sidebar .docs-version-selector{border-top:1px solid #dbdbdb;display:none;padding:0.5rem}#documenter .docs-sidebar .docs-version-selector.visible{display:flex}#documenter .docs-sidebar ul.docs-menu{flex-grow:1;user-select:none;border-top:1px solid #dbdbdb;padding-bottom:1.5rem}#documenter .docs-sidebar ul.docs-menu>li>.tocitem{font-weight:bold}#documenter .docs-sidebar ul.docs-menu>li li{font-size:.95rem;margin-left:1em;border-left:1px solid #dbdbdb}#documenter .docs-sidebar ul.docs-menu input.collapse-toggle{display:none}#documenter .docs-sidebar ul.docs-menu ul.collapsed{display:none}#documenter .docs-sidebar ul.docs-menu input:checked~ul.collapsed{display:block}#documenter .docs-sidebar ul.docs-menu label.tocitem{display:flex}#documenter .docs-sidebar ul.docs-menu label.tocitem .docs-label{flex-grow:2}#documenter .docs-sidebar ul.docs-menu label.tocitem .docs-chevron{display:inline-block;font-style:normal;font-variant:normal;text-rendering:auto;line-height:1;font-size:.75rem;margin-left:1rem;margin-top:auto;margin-bottom:auto}#documenter .docs-sidebar ul.docs-menu label.tocitem .docs-chevron::before{font-family:"Font Awesome 6 Free";font-weight:900;content:"\f054"}#documenter .docs-sidebar ul.docs-menu input:checked~label.tocitem .docs-chevron::before{content:"\f078"}#documenter .docs-sidebar ul.docs-menu .tocitem{display:block;padding:0.5rem 0.5rem}#documenter .docs-sidebar ul.docs-menu .tocitem,#documenter .docs-sidebar ul.docs-menu .tocitem:hover{color:#0a0a0a;background:#f5f5f5}#documenter .docs-sidebar ul.docs-menu a.tocitem:hover,#documenter .docs-sidebar ul.docs-menu label.tocitem:hover{color:#0a0a0a;background-color:#ebebeb}#documenter .docs-sidebar ul.docs-menu li.is-active{border-top:1px solid #dbdbdb;border-bottom:1px solid #dbdbdb;background-color:#fff}#documenter .docs-sidebar ul.docs-menu 
li.is-active .tocitem,#documenter .docs-sidebar ul.docs-menu li.is-active .tocitem:hover{background-color:#fff;color:#0a0a0a}#documenter .docs-sidebar ul.docs-menu li.is-active ul.internal .tocitem:hover{background-color:#ebebeb;color:#0a0a0a}#documenter .docs-sidebar ul.docs-menu>li.is-active:first-child{border-top:none}#documenter .docs-sidebar ul.docs-menu ul.internal{margin:0 0.5rem 0.5rem;border-top:1px solid #dbdbdb}#documenter .docs-sidebar ul.docs-menu ul.internal li{font-size:.85rem;border-left:none;margin-left:0;margin-top:0.5rem}#documenter .docs-sidebar ul.docs-menu ul.internal .tocitem{width:100%;padding:0}#documenter .docs-sidebar ul.docs-menu ul.internal .tocitem::before{content:"⚬";margin-right:0.4em}#documenter .docs-sidebar form.docs-search{margin:auto;margin-top:0.5rem;margin-bottom:0.5rem}#documenter .docs-sidebar form.docs-search>input{width:14.4rem}#documenter .docs-sidebar #documenter-search-query{color:#707070;width:14.4rem;box-shadow:inset 0 1px 2px rgba(10,10,10,0.1)}@media screen and (min-width: 1056px){#documenter .docs-sidebar ul.docs-menu{overflow-y:auto;-webkit-overflow-scroll:touch}#documenter .docs-sidebar ul.docs-menu::-webkit-scrollbar{width:.3rem;background:none}#documenter .docs-sidebar ul.docs-menu::-webkit-scrollbar-thumb{border-radius:5px 0px 0px 5px;background:#e0e0e0}#documenter .docs-sidebar ul.docs-menu::-webkit-scrollbar-thumb:hover{background:#ccc}}@media screen and (max-width: 1055px){#documenter .docs-sidebar{overflow-y:auto;-webkit-overflow-scroll:touch}#documenter .docs-sidebar::-webkit-scrollbar{width:.3rem;background:none}#documenter .docs-sidebar::-webkit-scrollbar-thumb{border-radius:5px 0px 0px 5px;background:#e0e0e0}#documenter .docs-sidebar::-webkit-scrollbar-thumb:hover{background:#ccc}}kbd.search-modal-key-hints{border-radius:0.25rem;border:1px solid rgba(0,0,0,0.6);box-shadow:0 2px 0 1px rgba(0,0,0,0.6);cursor:default;font-size:0.9rem;line-height:1.5;min-width:0.75rem;text-align:center;padding:0.1rem 0.3rem;position:relative;top:-1px}.search-min-width-50{min-width:50%}.search-min-height-100{min-height:100%}.search-modal-card-body{max-height:calc(100vh - 15rem)}.search-result-link{border-radius:0.7em;transition:all 300ms}.search-result-link:hover,.search-result-link:focus{background-color:rgba(0,128,128,0.1)}.search-result-link .property-search-result-badge,.search-result-link .search-filter{transition:all 300ms}.property-search-result-badge,.search-filter{padding:0.15em 0.5em;font-size:0.8em;font-style:italic;text-transform:none !important;line-height:1.5;color:#f5f5f5;background-color:rgba(51,65,85,0.501961);border-radius:0.6rem}.search-result-link:hover .property-search-result-badge,.search-result-link:hover .search-filter,.search-result-link:focus .property-search-result-badge,.search-result-link:focus .search-filter{color:#f1f5f9;background-color:#333}.search-filter{color:#333;background-color:#f5f5f5;transition:all 300ms}.search-filter:hover,.search-filter:focus{color:#333}.search-filter-selected{color:#f5f5f5;background-color:rgba(139,0,139,0.5)}.search-filter-selected:hover,.search-filter-selected:focus{color:#f5f5f5}.search-result-highlight{background-color:#ffdd57;color:black}.search-divider{border-bottom:1px solid #dbdbdb}.search-result-title{width:85%;color:#333}.search-result-code-title{font-size:0.875rem;font-family:"JuliaMono","SFMono-Regular","Menlo","Consolas","Liberation Mono","DejaVu Sans Mono",monospace}#search-modal .modal-card-body::-webkit-scrollbar,#search-modal 
.filter-tabs::-webkit-scrollbar{height:10px;width:10px;background-color:transparent}#search-modal .modal-card-body::-webkit-scrollbar-thumb,#search-modal .filter-tabs::-webkit-scrollbar-thumb{background-color:gray;border-radius:1rem}#search-modal .modal-card-body::-webkit-scrollbar-track,#search-modal .filter-tabs::-webkit-scrollbar-track{-webkit-box-shadow:inset 0 0 6px rgba(0,0,0,0.6);background-color:transparent}.w-100{width:100%}.gap-2{gap:0.5rem}.gap-4{gap:1rem}.gap-8{gap:2rem}.ansi span.sgr1{font-weight:bolder}.ansi span.sgr2{font-weight:lighter}.ansi span.sgr3{font-style:italic}.ansi span.sgr4{text-decoration:underline}.ansi span.sgr7{color:#fff;background-color:#222}.ansi span.sgr8{color:transparent}.ansi span.sgr8 span{color:transparent}.ansi span.sgr9{text-decoration:line-through}.ansi span.sgr30{color:#242424}.ansi span.sgr31{color:#a7201f}.ansi span.sgr32{color:#066f00}.ansi span.sgr33{color:#856b00}.ansi span.sgr34{color:#2149b0}.ansi span.sgr35{color:#7d4498}.ansi span.sgr36{color:#007989}.ansi span.sgr37{color:gray}.ansi span.sgr40{background-color:#242424}.ansi span.sgr41{background-color:#a7201f}.ansi span.sgr42{background-color:#066f00}.ansi span.sgr43{background-color:#856b00}.ansi span.sgr44{background-color:#2149b0}.ansi span.sgr45{background-color:#7d4498}.ansi span.sgr46{background-color:#007989}.ansi span.sgr47{background-color:gray}.ansi span.sgr90{color:#616161}.ansi span.sgr91{color:#cb3c33}.ansi span.sgr92{color:#0e8300}.ansi span.sgr93{color:#a98800}.ansi span.sgr94{color:#3c5dcd}.ansi span.sgr95{color:#9256af}.ansi span.sgr96{color:#008fa3}.ansi span.sgr97{color:#f5f5f5}.ansi span.sgr100{background-color:#616161}.ansi span.sgr101{background-color:#cb3c33}.ansi span.sgr102{background-color:#0e8300}.ansi span.sgr103{background-color:#a98800}.ansi span.sgr104{background-color:#3c5dcd}.ansi span.sgr105{background-color:#9256af}.ansi span.sgr106{background-color:#008fa3}.ansi span.sgr107{background-color:#f5f5f5}code.language-julia-repl>span.hljs-meta{color:#066f00;font-weight:bolder}/*! + Theme: Default + Description: Original highlight.js style + Author: (c) Ivan Sagalaev + Maintainer: @highlightjs/core-team + Website: https://highlightjs.org/ + License: see project LICENSE + Touched: 2021 +*/pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}.hljs{background:#F3F3F3;color:#444}.hljs-comment{color:#697070}.hljs-tag,.hljs-punctuation{color:#444a}.hljs-tag .hljs-name,.hljs-tag .hljs-attr{color:#444}.hljs-keyword,.hljs-attribute,.hljs-selector-tag,.hljs-meta .hljs-keyword,.hljs-doctag,.hljs-name{font-weight:bold}.hljs-type,.hljs-string,.hljs-number,.hljs-selector-id,.hljs-selector-class,.hljs-quote,.hljs-template-tag,.hljs-deletion{color:#880000}.hljs-title,.hljs-section{color:#880000;font-weight:bold}.hljs-regexp,.hljs-symbol,.hljs-variable,.hljs-template-variable,.hljs-link,.hljs-selector-attr,.hljs-operator,.hljs-selector-pseudo{color:#ab5656}.hljs-literal{color:#695}.hljs-built_in,.hljs-bullet,.hljs-code,.hljs-addition{color:#397300}.hljs-meta{color:#1f7199}.hljs-meta .hljs-string{color:#38a}.hljs-emphasis{font-style:italic}.hljs-strong{font-weight:bold}.gap-4{gap:1rem} diff --git a/v0.20.3/assets/themeswap.js b/v0.20.3/assets/themeswap.js new file mode 100644 index 000000000..9f5eebe6a --- /dev/null +++ b/v0.20.3/assets/themeswap.js @@ -0,0 +1,84 @@ +// Small function to quickly swap out themes. Gets put into the tag.. 
+function set_theme_from_local_storage() { + // Initialize the theme to null, which means default + var theme = null; + // If the browser supports the localstorage and is not disabled then try to get the + // documenter theme + if (window.localStorage != null) { + // Get the user-picked theme from localStorage. May be `null`, which means the default + // theme. + theme = window.localStorage.getItem("documenter-theme"); + } + // Check if the users preference is for dark color scheme + var darkPreference = + window.matchMedia("(prefers-color-scheme: dark)").matches === true; + // Initialize a few variables for the loop: + // + // - active: will contain the index of the theme that should be active. Note that there + // is no guarantee that localStorage contains sane values. If `active` stays `null` + // we either could not find the theme or it is the default (primary) theme anyway. + // Either way, we then need to stick to the primary theme. + // + // - disabled: style sheets that should be disabled (i.e. all the theme style sheets + // that are not the currently active theme) + var active = null; + var disabled = []; + var primaryLightTheme = null; + var primaryDarkTheme = null; + for (var i = 0; i < document.styleSheets.length; i++) { + var ss = document.styleSheets[i]; + // The tag of each style sheet is expected to have a data-theme-name attribute + // which must contain the name of the theme. The names in localStorage much match this. + var themename = ss.ownerNode.getAttribute("data-theme-name"); + // attribute not set => non-theme stylesheet => ignore + if (themename === null) continue; + // To distinguish the default (primary) theme, it needs to have the data-theme-primary + // attribute set. + if (ss.ownerNode.getAttribute("data-theme-primary") !== null) { + primaryLightTheme = themename; + } + // Check if the theme is primary dark theme so that we could store its name in darkTheme + if (ss.ownerNode.getAttribute("data-theme-primary-dark") !== null) { + primaryDarkTheme = themename; + } + // If we find a matching theme (and it's not the default), we'll set active to non-null + if (themename === theme) active = i; + // Store the style sheets of inactive themes so that we could disable them + if (themename !== theme) disabled.push(ss); + } + var activeTheme = null; + if (active !== null) { + // If we did find an active theme, we'll (1) add the theme--$(theme) class to + document.getElementsByTagName("html")[0].className = "theme--" + theme; + activeTheme = theme; + } else { + // If we did _not_ find an active theme, then we need to fall back to the primary theme + // which can either be dark or light, depending on the user's OS preference. + var activeTheme = darkPreference ? primaryDarkTheme : primaryLightTheme; + // In case it somehow happens that the relevant primary theme was not found in the + // preceding loop, we abort without doing anything. + if (activeTheme === null) { + console.error("Unable to determine primary theme."); + return; + } + // When switching to the primary light theme, then we must not have a class name + // for the tag. That's only for non-primary or the primary dark theme. 
+ if (darkPreference) { + document.getElementsByTagName("html")[0].className = + "theme--" + activeTheme; + } else { + document.getElementsByTagName("html")[0].className = ""; + } + } + for (var i = 0; i < document.styleSheets.length; i++) { + var ss = document.styleSheets[i]; + // The tag of each style sheet is expected to have a data-theme-name attribute + // which must contain the name of the theme. The names in localStorage much match this. + var themename = ss.ownerNode.getAttribute("data-theme-name"); + // attribute not set => non-theme stylesheet => ignore + if (themename === null) continue; + // we'll disable all the stylesheets, except for the active one + ss.disabled = !(themename == activeTheme); + } +} +set_theme_from_local_storage(); diff --git a/v0.20.3/assets/warner.js b/v0.20.3/assets/warner.js new file mode 100644 index 000000000..3f6f5d008 --- /dev/null +++ b/v0.20.3/assets/warner.js @@ -0,0 +1,52 @@ +function maybeAddWarning() { + // DOCUMENTER_NEWEST is defined in versions.js, DOCUMENTER_CURRENT_VERSION and DOCUMENTER_STABLE + // in siteinfo.js. + // If either of these are undefined something went horribly wrong, so we abort. + if ( + window.DOCUMENTER_NEWEST === undefined || + window.DOCUMENTER_CURRENT_VERSION === undefined || + window.DOCUMENTER_STABLE === undefined + ) { + return; + } + + // Current version is not a version number, so we can't tell if it's the newest version. Abort. + if (!/v(\d+\.)*\d+/.test(window.DOCUMENTER_CURRENT_VERSION)) { + return; + } + + // Current version is newest version, so no need to add a warning. + if (window.DOCUMENTER_NEWEST === window.DOCUMENTER_CURRENT_VERSION) { + return; + } + + // Add a noindex meta tag (unless one exists) so that search engines don't index this version of the docs. + if (document.body.querySelector('meta[name="robots"]') === null) { + const meta = document.createElement("meta"); + meta.name = "robots"; + meta.content = "noindex"; + + document.getElementsByTagName("head")[0].appendChild(meta); + } + + const div = document.createElement("div"); + div.classList.add("outdated-warning-overlay"); + const closer = document.createElement("button"); + closer.classList.add("outdated-warning-closer", "delete"); + closer.addEventListener("click", function () { + document.body.removeChild(div); + }); + const href = window.documenterBaseURL + "/../" + window.DOCUMENTER_STABLE; + div.innerHTML = + 'This documentation is not for the latest stable release, but for either the development version or an older release.
Click here to go to the documentation for the latest stable release.'; + div.appendChild(closer); + document.body.appendChild(div); +} + +if (document.readyState === "loading") { + document.addEventListener("DOMContentLoaded", maybeAddWarning); +} else { + maybeAddWarning(); +} diff --git a/v0.20.3/benchmarking/index.html b/v0.20.3/benchmarking/index.html new file mode 100644 index 000000000..786369666 --- /dev/null +++ b/v0.20.3/benchmarking/index.html @@ -0,0 +1,2 @@ + +Benchmarking · MLJ diff --git a/v0.20.3/common_mlj_workflows/index.html b/v0.20.3/common_mlj_workflows/index.html new file mode 100644 index 000000000..d3978c669 --- /dev/null +++ b/v0.20.3/common_mlj_workflows/index.html @@ -0,0 +1,538 @@ + +Common MLJ Workflows · MLJ

Common MLJ Workflows

Data ingestion

import RDatasets
+channing = RDatasets.dataset("boot", "channing")
+
+julia> first(channing, 4)
+4×5 DataFrame
+ Row │ Sex   Entry  Exit   Time   Cens
+     │ Cat…  Int32  Int32  Int32  Int32
+─────┼──────────────────────────────────
+   1 │ Male    782    909    127      1
+   2 │ Male   1020   1128    108      1
+   3 │ Male    856    969    113      1
+   4 │ Male    915    957     42      1

Inspecting metadata, including column scientific types:

schema(channing)
┌───────┬───────────────┬──────────────────────────────────┐
+│ names │ scitypes      │ types                            │
+├───────┼───────────────┼──────────────────────────────────┤
+│ Sex   │ Multiclass{2} │ CategoricalValue{String, UInt32} │
+│ Entry │ Count         │ Int64                            │
+│ Exit  │ Count         │ Int64                            │
+│ Time  │ Count         │ Int64                            │
+│ Cens  │ Count         │ Int64                            │
+└───────┴───────────────┴──────────────────────────────────┘
+

Horizontally splitting data (separating off columns) and shuffling rows.

Here y is the :Exit column and X everything else:

y, X =  unpack(channing, ==(:Exit), rng=123);

Here y is the :Exit column and X everything else except :Time:

y, X =  unpack(channing,
+               ==(:Exit),
+               !=(:Time);
+               rng=123);
+scitype(y)
AbstractVector{Count} (alias for AbstractArray{Count, 1})
schema(X)
┌───────┬───────────────┬──────────────────────────────────┐
+│ names │ scitypes      │ types                            │
+├───────┼───────────────┼──────────────────────────────────┤
+│ Sex   │ Multiclass{2} │ CategoricalValue{String, UInt32} │
+│ Entry │ Count         │ Int64                            │
+│ Cens  │ Count         │ Int64                            │
+└───────┴───────────────┴──────────────────────────────────┘
+

Fixing wrong scientific types in X:

X = coerce(X, :Exit=>Continuous, :Entry=>Continuous, :Cens=>Multiclass)
+schema(X)
┌───────┬─────────────────┬──────────────────────────────────┐
+│ names │ scitypes        │ types                            │
+├───────┼─────────────────┼──────────────────────────────────┤
+│ Sex   │ Multiclass{2}   │ CategoricalValue{String, UInt32} │
+│ Entry │ Continuous      │ Float64                          │
+│ Cens  │ Multiclass{462} │ CategoricalValue{Int64, UInt32}  │
+└───────┴─────────────────┴──────────────────────────────────┘
+

Loading a built-in supervised dataset:

table = load_iris();
+schema(table)
┌──────────────┬───────────────┬──────────────────────────────────┐
+│ names        │ scitypes      │ types                            │
+├──────────────┼───────────────┼──────────────────────────────────┤
+│ sepal_length │ Continuous    │ Float64                          │
+│ sepal_width  │ Continuous    │ Float64                          │
+│ petal_length │ Continuous    │ Float64                          │
+│ petal_width  │ Continuous    │ Float64                          │
+│ target       │ Multiclass{3} │ CategoricalValue{String, UInt32} │
+└──────────────┴───────────────┴──────────────────────────────────┘
+

Loading a built-in data set already split into X and y:

X, y = @load_iris;
+selectrows(X, 1:4) # selectrows works whenever `Tables.istable(X)==true`.
(sepal_length = [5.1, 4.9, 4.7, 4.6],
+ sepal_width = [3.5, 3.0, 3.2, 3.1],
+ petal_length = [1.4, 1.4, 1.3, 1.5],
+ petal_width = [0.2, 0.2, 0.2, 0.2],)
y[1:4]
4-element CategoricalArray{String,1,UInt32}:
+ "setosa"
+ "setosa"
+ "setosa"
+ "setosa"

Splitting data vertically (into train and test rows) after row shuffling:

channing_train, channing_test = partition(channing, 0.6, rng=123);

Or, if the data is already horizontally split into X and y:

(Xtrain, Xtest), (ytrain, ytest)  = partition((X, y), 0.6, multi=true,  rng=123)
(((sepal_length = [6.7, 5.7, 7.2, 4.4, 5.6, 6.5, 4.4, 6.1, 5.4, 4.9  …  6.4, 5.5, 5.4, 4.8, 6.5, 4.9, 6.5, 6.7, 5.6, 6.4], sepal_width = [3.3, 2.8, 3.0, 2.9, 2.5, 3.0, 3.0, 2.9, 3.9, 2.5  …  3.1, 2.3, 3.7, 3.1, 3.0, 2.4, 2.8, 3.3, 2.9, 2.8], petal_length = [5.7, 4.1, 5.8, 1.4, 3.9, 5.2, 1.3, 4.7, 1.7, 4.5  …  5.5, 4.0, 1.5, 1.6, 5.5, 3.3, 4.6, 5.7, 3.6, 5.6], petal_width = [2.1, 1.3, 1.6, 0.2, 1.1, 2.0, 0.2, 1.4, 0.4, 1.7  …  1.8, 1.3, 0.2, 0.2, 1.8, 1.0, 1.5, 2.5, 1.3, 2.2]), (sepal_length = [6.0, 5.8, 6.7, 5.1, 5.0, 6.3, 5.7, 6.4, 6.1, 5.0  …  6.4, 6.8, 6.9, 6.1, 6.7, 5.0, 7.6, 6.3, 5.1, 5.0], sepal_width = [2.7, 2.6, 3.0, 3.8, 3.4, 2.8, 2.5, 3.2, 2.8, 3.5  …  2.7, 3.2, 3.1, 2.8, 2.5, 3.5, 3.0, 2.5, 3.8, 3.6], petal_length = [5.1, 4.0, 5.2, 1.9, 1.5, 5.1, 5.0, 4.5, 4.7, 1.6  …  5.3, 5.9, 5.4, 4.0, 5.8, 1.3, 6.6, 5.0, 1.6, 1.4], petal_width = [1.6, 1.2, 2.3, 0.4, 0.2, 1.5, 2.0, 1.5, 1.2, 0.6  …  1.9, 2.3, 2.1, 1.3, 1.8, 0.3, 2.1, 1.9, 0.2, 0.2])), (CategoricalValue{String, UInt32}["virginica", "versicolor", "virginica", "setosa", "versicolor", "virginica", "setosa", "versicolor", "setosa", "virginica"  …  "virginica", "versicolor", "setosa", "setosa", "virginica", "versicolor", "versicolor", "virginica", "versicolor", "virginica"], CategoricalValue{String, UInt32}["versicolor", "versicolor", "virginica", "setosa", "setosa", "virginica", "virginica", "versicolor", "versicolor", "setosa"  …  "virginica", "virginica", "virginica", "versicolor", "virginica", "setosa", "virginica", "virginica", "setosa", "setosa"]))

Reference: Model Search

Searching for a supervised model:

X, y = @load_boston
+ms = models(matching(X, y))
69-element Vector{NamedTuple{(:name, :package_name, :is_supervised, :abstract_type, :deep_properties, :docstring, :fit_data_scitype, :human_name, :hyperparameter_ranges, :hyperparameter_types, :hyperparameters, :implemented_methods, :inverse_transform_scitype, :is_pure_julia, :is_wrapper, :iteration_parameter, :load_path, :package_license, :package_url, :package_uuid, :predict_scitype, :prediction_type, :reporting_operations, :reports_feature_importances, :supports_class_weights, :supports_online, :supports_training_losses, :supports_weights, :transform_scitype, :input_scitype, :target_scitype, :output_scitype)}}:
+ (name = ARDRegressor, package_name = MLJScikitLearnInterface, ... )
+ (name = AdaBoostRegressor, package_name = MLJScikitLearnInterface, ... )
+ (name = BaggingRegressor, package_name = MLJScikitLearnInterface, ... )
+ (name = BayesianRidgeRegressor, package_name = MLJScikitLearnInterface, ... )
+ (name = CatBoostRegressor, package_name = CatBoost, ... )
+ (name = ConstantRegressor, package_name = MLJModels, ... )
+ (name = DecisionTreeRegressor, package_name = BetaML, ... )
+ (name = DecisionTreeRegressor, package_name = DecisionTree, ... )
+ (name = DeterministicConstantRegressor, package_name = MLJModels, ... )
+ (name = DummyRegressor, package_name = MLJScikitLearnInterface, ... )
+ ⋮
+ (name = SGDRegressor, package_name = MLJScikitLearnInterface, ... )
+ (name = SRRegressor, package_name = SymbolicRegression, ... )
+ (name = SVMLinearRegressor, package_name = MLJScikitLearnInterface, ... )
+ (name = SVMNuRegressor, package_name = MLJScikitLearnInterface, ... )
+ (name = SVMRegressor, package_name = MLJScikitLearnInterface, ... )
+ (name = StableForestRegressor, package_name = SIRUS, ... )
+ (name = StableRulesRegressor, package_name = SIRUS, ... )
+ (name = TheilSenRegressor, package_name = MLJScikitLearnInterface, ... )
+ (name = XGBoostRegressor, package_name = XGBoost, ... )
ms[6]
(name = "ConstantRegressor",
+ package_name = "MLJModels",
+ is_supervised = true,
+ abstract_type = Probabilistic,
+ deep_properties = (),
+ docstring = "```\nConstantRegressor\n```\n\nThis \"dummy\" probabilis...",
+ fit_data_scitype = Tuple{Table, AbstractVector{Continuous}},
+ human_name = "constant regressor",
+ hyperparameter_ranges = (nothing,),
+ hyperparameter_types = ("Type{D} where D<:Distributions.Sampleable",),
+ hyperparameters = (:distribution_type,),
+ implemented_methods = [:fitted_params, :predict],
+ inverse_transform_scitype = Unknown,
+ is_pure_julia = true,
+ is_wrapper = false,
+ iteration_parameter = nothing,
+ load_path = "MLJModels.ConstantRegressor",
+ package_license = "MIT",
+ package_url = "https://github.com/alan-turing-institute/MLJModels.jl",
+ package_uuid = "d491faf4-2d78-11e9-2867-c94bc002c0b7",
+ predict_scitype = AbstractVector{ScientificTypesBase.Density{Continuous}},
+ prediction_type = :probabilistic,
+ reporting_operations = (),
+ reports_feature_importances = false,
+ supports_class_weights = false,
+ supports_online = false,
+ supports_training_losses = false,
+ supports_weights = false,
+ transform_scitype = Unknown,
+ input_scitype = Table,
+ target_scitype = AbstractVector{Continuous},
+ output_scitype = Unknown)
models("Tree");
28-element Vector{NamedTuple{(:name, :package_name, :is_supervised, :abstract_type, :deep_properties, :docstring, :fit_data_scitype, :human_name, :hyperparameter_ranges, :hyperparameter_types, :hyperparameters, :implemented_methods, :inverse_transform_scitype, :is_pure_julia, :is_wrapper, :iteration_parameter, :load_path, :package_license, :package_url, :package_uuid, :predict_scitype, :prediction_type, :reporting_operations, :reports_feature_importances, :supports_class_weights, :supports_online, :supports_training_losses, :supports_weights, :transform_scitype, :input_scitype, :target_scitype, :output_scitype)}}:
+ (name = ABODDetector, package_name = OutlierDetectionNeighbors, ... )
+ (name = AdaBoostStumpClassifier, package_name = DecisionTree, ... )
+ (name = COFDetector, package_name = OutlierDetectionNeighbors, ... )
+ (name = DNNDetector, package_name = OutlierDetectionNeighbors, ... )
+ (name = DecisionTreeClassifier, package_name = BetaML, ... )
+ (name = DecisionTreeClassifier, package_name = DecisionTree, ... )
+ (name = DecisionTreeRegressor, package_name = BetaML, ... )
+ (name = DecisionTreeRegressor, package_name = DecisionTree, ... )
+ (name = EvoTreeClassifier, package_name = EvoTrees, ... )
+ (name = EvoTreeCount, package_name = EvoTrees, ... )
+ ⋮
+ (name = LOFDetector, package_name = OutlierDetectionNeighbors, ... )
+ (name = MultitargetKNNClassifier, package_name = NearestNeighborModels, ... )
+ (name = MultitargetKNNRegressor, package_name = NearestNeighborModels, ... )
+ (name = OneRuleClassifier, package_name = OneRule, ... )
+ (name = RandomForestClassifier, package_name = BetaML, ... )
+ (name = RandomForestClassifier, package_name = DecisionTree, ... )
+ (name = RandomForestRegressor, package_name = BetaML, ... )
+ (name = RandomForestRegressor, package_name = DecisionTree, ... )
+ (name = SMOTENC, package_name = Imbalance, ... )

A more refined search:

models() do model
+    matching(model, X, y) &&
+    model.prediction_type == :deterministic &&
+    model.is_pure_julia
+end;

Searching for an unsupervised model:

models(matching(X))
63-element Vector{NamedTuple{(:name, :package_name, :is_supervised, :abstract_type, :deep_properties, :docstring, :fit_data_scitype, :human_name, :hyperparameter_ranges, :hyperparameter_types, :hyperparameters, :implemented_methods, :inverse_transform_scitype, :is_pure_julia, :is_wrapper, :iteration_parameter, :load_path, :package_license, :package_url, :package_uuid, :predict_scitype, :prediction_type, :reporting_operations, :reports_feature_importances, :supports_class_weights, :supports_online, :supports_training_losses, :supports_weights, :transform_scitype, :input_scitype, :target_scitype, :output_scitype)}}:
+ (name = ABODDetector, package_name = OutlierDetectionNeighbors, ... )
+ (name = ABODDetector, package_name = OutlierDetectionPython, ... )
+ (name = AffinityPropagation, package_name = MLJScikitLearnInterface, ... )
+ (name = AgglomerativeClustering, package_name = MLJScikitLearnInterface, ... )
+ (name = AutoEncoder, package_name = BetaML, ... )
+ (name = Birch, package_name = MLJScikitLearnInterface, ... )
+ (name = BisectingKMeans, package_name = MLJScikitLearnInterface, ... )
+ (name = CBLOFDetector, package_name = OutlierDetectionPython, ... )
+ (name = CDDetector, package_name = OutlierDetectionPython, ... )
+ (name = COFDetector, package_name = OutlierDetectionNeighbors, ... )
+ ⋮
+ (name = RODDetector, package_name = OutlierDetectionPython, ... )
+ (name = RandomForestImputer, package_name = BetaML, ... )
+ (name = SODDetector, package_name = OutlierDetectionPython, ... )
+ (name = SOSDetector, package_name = OutlierDetectionPython, ... )
+ (name = SelfOrganizingMap, package_name = SelfOrganizingMaps, ... )
+ (name = SimpleImputer, package_name = BetaML, ... )
+ (name = SpectralClustering, package_name = MLJScikitLearnInterface, ... )
+ (name = Standardizer, package_name = MLJModels, ... )
+ (name = TSVDTransformer, package_name = TSVD, ... )

Getting the metadata entry for a given model type:

info("PCA")
+info("RidgeRegressor", pkg="MultivariateStats") # a model type in multiple packages
(name = "RidgeRegressor",
+ package_name = "MultivariateStats",
+ is_supervised = true,
+ abstract_type = Deterministic,
+ deep_properties = (),
+ docstring = "```\nRidgeRegressor\n```\n\nA model type for construct...",
+ fit_data_scitype =
+     Tuple{Table{<:AbstractVector{<:Continuous}}, AbstractVector{Continuous}},
+ human_name = "ridge regressor",
+ hyperparameter_ranges = (nothing, nothing),
+ hyperparameter_types = ("Union{Real, AbstractVecOrMat}", "Bool"),
+ hyperparameters = (:lambda, :bias),
+ implemented_methods = [:clean!, :fit, :fitted_params, :predict],
+ inverse_transform_scitype = Unknown,
+ is_pure_julia = true,
+ is_wrapper = false,
+ iteration_parameter = nothing,
+ load_path = "MLJMultivariateStatsInterface.RidgeRegressor",
+ package_license = "MIT",
+ package_url = "https://github.com/JuliaStats/MultivariateStats.jl",
+ package_uuid = "6f286f6a-111f-5878-ab1e-185364afe411",
+ predict_scitype = AbstractVector{Continuous},
+ prediction_type = :deterministic,
+ reporting_operations = (),
+ reports_feature_importances = false,
+ supports_class_weights = false,
+ supports_online = false,
+ supports_training_losses = false,
+ supports_weights = false,
+ transform_scitype = Unknown,
+ input_scitype = Table{<:AbstractVector{<:Continuous}},
+ target_scitype = AbstractVector{Continuous},
+ output_scitype = Unknown)

Extracting the model document string (output omitted):

doc("DecisionTreeClassifier", pkg="DecisionTree")

Instantiating a model

Reference: Getting Started, Loading Model Code

Tree = @load DecisionTreeClassifier pkg=DecisionTree
+tree = Tree(min_samples_split=5, max_depth=4)
DecisionTreeClassifier(
+  max_depth = 4, 
+  min_samples_leaf = 1, 
+  min_samples_split = 5, 
+  min_purity_increase = 0.0, 
+  n_subfeatures = 0, 
+  post_prune = false, 
+  merge_purity_threshold = 1.0, 
+  display_depth = 5, 
+  feature_importance = :impurity, 
+  rng = Random._GLOBAL_RNG())

or

tree = (@load DecisionTreeClassifier)()
+tree.min_samples_split = 5
+tree.max_depth = 4

Evaluating a model

Reference: Evaluating Model Performance

X, y = @load_boston
+KNN = @load KNNRegressor
+knn = KNN()
+evaluate(knn, X, y,
+         resampling=CV(nfolds=5),
+         measure=[RootMeanSquaredError(), LPLoss(1)])
PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌────────────────────────┬───────────┬─────────────┬─────────┬──────────────────
+│ measure                │ operation │ measurement │ 1.96*SE │ per_fold        ⋯
+├────────────────────────┼───────────┼─────────────┼─────────┼──────────────────
+│ RootMeanSquaredError() │ predict   │ 8.77        │ 1.84    │ [8.53, 8.8, 10. ⋯
+│ LPLoss(                │ predict   │ 6.02        │ 1.26    │ [6.52, 5.7, 7.6 ⋯
+│   p = 1)               │           │             │         │                 ⋯
+└────────────────────────┴───────────┴─────────────┴─────────┴──────────────────
+                                                                1 column omitted
+

Note RootMeanSquaredError() has alias rms and LPLoss(1) has aliases l1, mae.

Run measures() to list all losses and scores and their aliases.
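
For example, the evaluation above can be rewritten using these aliases (a minimal sketch; rms and l1 name the same measures as RootMeanSquaredError() and LPLoss(1)):

evaluate(knn, X, y,
+         resampling=CV(nfolds=5),
+         measure=[rms, l1])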

Basic fit/evaluate/predict by hand:

Reference: Getting Started, Machines, Evaluating Model Performance, Performance Measures

crabs = load_crabs() |> DataFrames.DataFrame
+schema(crabs)
┌───────┬───────────────┬──────────────────────────────────┐
+│ names │ scitypes      │ types                            │
+├───────┼───────────────┼──────────────────────────────────┤
+│ sp    │ Multiclass{2} │ CategoricalValue{String, UInt32} │
+│ sex   │ Multiclass{2} │ CategoricalValue{String, UInt32} │
+│ index │ Count         │ Int64                            │
+│ FL    │ Continuous    │ Float64                          │
+│ RW    │ Continuous    │ Float64                          │
+│ CL    │ Continuous    │ Float64                          │
+│ CW    │ Continuous    │ Float64                          │
+│ BD    │ Continuous    │ Float64                          │
+└───────┴───────────────┴──────────────────────────────────┘
+
y, X = unpack(crabs, ==(:sp), !in([:index, :sex]); rng=123)
+
+
+Tree = @load DecisionTreeClassifier pkg=DecisionTree
+tree = Tree(max_depth=2)
DecisionTreeClassifier(
+  max_depth = 2, 
+  min_samples_leaf = 1, 
+  min_samples_split = 2, 
+  min_purity_increase = 0.0, 
+  n_subfeatures = 0, 
+  post_prune = false, 
+  merge_purity_threshold = 1.0, 
+  display_depth = 5, 
+  feature_importance = :impurity, 
+  rng = Random._GLOBAL_RNG())

Bind the model and data together in a machine, which will additionally store the learned parameters (fitresults) when fit:

mach = machine(tree, X, y)
untrained Machine; caches model-specific representations of data
+  model: DecisionTreeClassifier(max_depth = 2, …)
+  args: 
+    1:	Source @607 ⏎ Table{AbstractVector{Continuous}}
+    2:	Source @916 ⏎ AbstractVector{Multiclass{2}}
+

Split row indices into training and evaluation rows:

train, test = partition(eachindex(y), 0.7); # 70:30 split
([1, 2, 3, 4, 5, 6, 7, 8, 9, 10  …  131, 132, 133, 134, 135, 136, 137, 138, 139, 140], [141, 142, 143, 144, 145, 146, 147, 148, 149, 150  …  191, 192, 193, 194, 195, 196, 197, 198, 199, 200])

Fit on the train data set and evaluate on the test data set:

fit!(mach, rows=train)
+yhat = predict(mach, X[test,:])
+LogLoss(tol=1e-4)(yhat, y[test])
1.0788055664326648

Note LogLoss() has aliases log_loss and cross_entropy.

Run measures() to list all losses and scores and their aliases ("instances").
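
Because these aliases are bound to callable measure instances, the loss above can also be computed directly (a minimal sketch; the result agrees with the call above up to the default tol):

log_loss(yhat, y[test])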

Predict on the new data set:

Xnew = (FL = rand(3), RW = rand(3), CL = rand(3), CW = rand(3), BD =rand(3))
+predict(mach, Xnew)      # a vector of distributions
3-element UnivariateFiniteVector{Multiclass{2}, String, UInt32, Float64}:
+ UnivariateFinite{Multiclass{2}}(B=>0.667, O=>0.333)
+ UnivariateFinite{Multiclass{2}}(B=>0.667, O=>0.333)
+ UnivariateFinite{Multiclass{2}}(B=>0.667, O=>0.333)
predict_mode(mach, Xnew) # a vector of point-predictions
3-element CategoricalArray{String,1,UInt32}:
+ "B"
+ "B"
+ "B"

More performance evaluation examples

Evaluating model + data directly:

evaluate(tree, X, y,
+         resampling=Holdout(fraction_train=0.7, shuffle=true, rng=1234),
+         measure=[LogLoss(), Accuracy()])
PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────────────────┬──────────────┬─────────────┬──────────┐
+│ measure              │ operation    │ measurement │ per_fold │
+├──────────────────────┼──────────────┼─────────────┼──────────┤
+│ LogLoss(             │ predict      │ 1.12        │ [1.12]   │
+│   tol = 2.22045e-16) │              │             │          │
+│ Accuracy()           │ predict_mode │ 0.683       │ [0.683]  │
+└──────────────────────┴──────────────┴─────────────┴──────────┘
+

If a machine is already defined, as above:

evaluate!(mach,
+          resampling=Holdout(fraction_train=0.7, shuffle=true, rng=1234),
+          measure=[LogLoss(), Accuracy()])
PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────────────────┬──────────────┬─────────────┬──────────┐
+│ measure              │ operation    │ measurement │ per_fold │
+├──────────────────────┼──────────────┼─────────────┼──────────┤
+│ LogLoss(             │ predict      │ 1.12        │ [1.12]   │
+│   tol = 2.22045e-16) │              │             │          │
+│ Accuracy()           │ predict_mode │ 0.683       │ [0.683]  │
+└──────────────────────┴──────────────┴─────────────┴──────────┘
+

Using cross-validation:

evaluate!(mach, resampling=CV(nfolds=5, shuffle=true, rng=1234),
+          measure=[LogLoss(), Accuracy()])
PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────────────────┬──────────────┬─────────────┬─────────┬─────────────────
+│ measure              │ operation    │ measurement │ 1.96*SE │ per_fold       ⋯
+├──────────────────────┼──────────────┼─────────────┼─────────┼─────────────────
+│ LogLoss(             │ predict      │ 0.748       │ 0.432   │ [0.552, 0.534, ⋯
+│   tol = 2.22045e-16) │              │             │         │                ⋯
+│ Accuracy()           │ predict_mode │ 0.7         │ 0.0866  │ [0.775, 0.7, 0 ⋯
+└──────────────────────┴──────────────┴─────────────┴─────────┴─────────────────
+                                                                1 column omitted
+

With user-specified train/test pairs of row indices:

f1, f2, f3 = 1:13, 14:26, 27:36
+pairs = [(f1, vcat(f2, f3)), (f2, vcat(f3, f1)), (f3, vcat(f1, f2))];
+evaluate!(mach,
+          resampling=pairs,
+          measure=[LogLoss(), Accuracy()])
PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────────────────┬──────────────┬─────────────┬─────────┬─────────────────
+│ measure              │ operation    │ measurement │ 1.96*SE │ per_fold       ⋯
+├──────────────────────┼──────────────┼─────────────┼─────────┼─────────────────
+│ LogLoss(             │ predict      │ 3.8         │ 1.55    │ [5.1, 3.38, 3. ⋯
+│   tol = 2.22045e-16) │              │             │         │                ⋯
+│ Accuracy()           │ predict_mode │ 0.736       │ 0.0513  │ [0.696, 0.739, ⋯
+└──────────────────────┴──────────────┴─────────────┴─────────┴─────────────────
+                                                                1 column omitted
+

Changing a hyperparameter and re-evaluating:

tree.max_depth = 3
+evaluate!(mach,
+          resampling=CV(nfolds=5, shuffle=true, rng=1234),
+          measure=[LogLoss(), Accuracy()])
PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────────────────┬──────────────┬─────────────┬─────────┬─────────────────
+│ measure              │ operation    │ measurement │ 1.96*SE │ per_fold       ⋯
+├──────────────────────┼──────────────┼─────────────┼─────────┼─────────────────
+│ LogLoss(             │ predict      │ 1.19        │ 0.957   │ [1.26, 0.2, 0. ⋯
+│   tol = 2.22045e-16) │              │             │         │                ⋯
+│ Accuracy()           │ predict_mode │ 0.865       │ 0.0877  │ [0.8, 0.95, 0. ⋯
+└──────────────────────┴──────────────┴─────────────┴─────────┴─────────────────
+                                                                1 column omitted
+

Inspecting training results

Fit an ordinary least squares model to some synthetic data:

x1 = rand(100)
+x2 = rand(100)
+
+X = (x1=x1, x2=x2)
+y = x1 - 2x2 + 0.1*rand(100);
+
+OLS = @load LinearRegressor pkg=GLM
+ols = OLS()
+mach =  machine(ols, X, y) |> fit!
trained Machine; caches model-specific representations of data
+  model: LinearRegressor(fit_intercept = true, …)
+  args: 
+    1:	Source @843 ⏎ Table{AbstractVector{Continuous}}
+    2:	Source @676 ⏎ AbstractVector{Continuous}
+

Get a named tuple representing the learned parameters, human-readable if appropriate:

fitted_params(mach)
(features = [:x1, :x2],
+ coef = [1.011604092919142, -2.016096643675243],
+ intercept = 0.052080100708338774,)

Get other training-related information:

report(mach)
(stderror = [0.007773048863073484, 0.009722106683070572, 0.009545019349934552],
+ dof_residual = 97.0,
+ vcov = [6.0420288627727974e-5 -5.2331899383945185e-5 -5.099118761721098e-5; -5.2331899383945185e-5 9.451935835700547e-5 8.524897211714471e-6; -5.099118761721098e-5 8.524897211714471e-6 9.110739439062502e-5],
+ deviance = 0.0751357917858701,
+ coef_table = ──────────────────────────────────────────────────────────────────────────────
+                  Coef.  Std. Error        t  Pr(>|t|)   Lower 95%   Upper 95%
+──────────────────────────────────────────────────────────────────────────────
+(Intercept)   0.0520801  0.00777305     6.70    <1e-08   0.0366528   0.0675075
+x1            1.0116     0.00972211   104.05    <1e-99   0.992308    1.0309
+x2           -2.0161     0.00954502  -211.22    <1e-99  -2.03504    -1.99715
+──────────────────────────────────────────────────────────────────────────────,)

Basic fit/transform for unsupervised models

Load data:

X, y = @load_iris
+train, test = partition(eachindex(y), 0.97, shuffle=true, rng=123)
([125, 100, 130, 9, 70, 148, 39, 64, 6, 107  …  110, 59, 139, 21, 112, 144, 140, 72, 109, 41], [106, 147, 47, 5])

Instantiate and fit the model/machine:

PCA = @load PCA
+pca = PCA(maxoutdim=2)
+mach = machine(pca, X)
+fit!(mach, rows=train)
trained Machine; caches model-specific representations of data
+  model: PCA(maxoutdim = 2, …)
+  args: 
+    1:	Source @296 ⏎ Table{AbstractVector{Continuous}}
+

Transform selected data bound to the machine:

transform(mach, rows=test);
(x1 = [-3.394282685448322, -1.5219827578765053, 2.53824745518522, 2.7299639893931382],
+ x2 = [0.547245022374522, -0.36842368617126425, 0.5199299511335688, 0.3448466122232349],)

Transform new data:

Xnew = (sepal_length=rand(3), sepal_width=rand(3),
+        petal_length=rand(3), petal_width=rand(3));
+transform(mach, Xnew)
(x1 = [5.243642624378951, 4.98664142241553, 4.809879833201745],
+ x2 = [-4.860013672929733, -4.696924661476474, -5.044545140029135],)

Inverting learned transformations

y = rand(100);
+stand = Standardizer()
+mach = machine(stand, y)
+fit!(mach)
+z = transform(mach, y);
+@assert inverse_transform(mach, z) ≈ y # true
[ Info: Training machine(Standardizer(features = Symbol[], …), …).

Nested hyperparameter tuning

Reference: Tuning Models

Define a model with nested hyperparameters:

Tree = @load DecisionTreeClassifier pkg=DecisionTree
+tree = Tree()
+forest = EnsembleModel(model=tree, n=300)
ProbabilisticEnsembleModel(
+  model = DecisionTreeClassifier(
+        max_depth = -1, 
+        min_samples_leaf = 1, 
+        min_samples_split = 2, 
+        min_purity_increase = 0.0, 
+        n_subfeatures = 0, 
+        post_prune = false, 
+        merge_purity_threshold = 1.0, 
+        display_depth = 5, 
+        feature_importance = :impurity, 
+        rng = Random._GLOBAL_RNG()), 
+  atomic_weights = Float64[], 
+  bagging_fraction = 0.8, 
+  rng = Random._GLOBAL_RNG(), 
+  n = 300, 
+  acceleration = CPU1{Nothing}(nothing), 
+  out_of_bag_measure = Any[])

Define ranges for hyperparameters to be tuned:

r1 = range(forest, :bagging_fraction, lower=0.5, upper=1.0, scale=:log10)
NumericRange(0.5 ≤ bagging_fraction ≤ 1.0; origin=0.75, unit=0.25; on log10 scale)
r2 = range(forest, :(model.n_subfeatures), lower=1, upper=4) # nested
NumericRange(1 ≤ model.n_subfeatures ≤ 4; origin=2.5, unit=1.5)
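
To inspect the candidate values a range generates at a given resolution, it can be materialized with iterator (a minimal sketch, assuming the MLJBase iterator function re-exported by MLJ):

iterator(r1, 6)   # six candidate bagging_fraction values, log10-spaced
+iterator(r2, 4)   # candidate values for model.n_subfeatures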

Wrap the model in a tuning strategy:

tuned_forest = TunedModel(model=forest,
+                          tuning=Grid(resolution=12),
+                          resampling=CV(nfolds=6),
+                          ranges=[r1, r2],
+                          measure=BrierLoss())
ProbabilisticTunedModel(
+  model = ProbabilisticEnsembleModel(
+        model = DecisionTreeClassifier(max_depth = -1, …), 
+        atomic_weights = Float64[], 
+        bagging_fraction = 0.8, 
+        rng = Random._GLOBAL_RNG(), 
+        n = 300, 
+        acceleration = CPU1{Nothing}(nothing), 
+        out_of_bag_measure = Any[]), 
+  tuning = Grid(
+        goal = nothing, 
+        resolution = 12, 
+        shuffle = true, 
+        rng = Random._GLOBAL_RNG()), 
+  resampling = CV(
+        nfolds = 6, 
+        shuffle = false, 
+        rng = Random._GLOBAL_RNG()), 
+  measure = BrierLoss(), 
+  weights = nothing, 
+  class_weights = nothing, 
+  operation = nothing, 
+  range = NumericRange{T, MLJBase.Bounded, Symbol} where T[NumericRange(0.5 ≤ bagging_fraction ≤ 1.0; origin=0.75, unit=0.25; on log10 scale), NumericRange(1 ≤ model.n_subfeatures ≤ 4; origin=2.5, unit=1.5)], 
+  selection_heuristic = MLJTuning.NaiveSelection(nothing), 
+  train_best = true, 
+  repeats = 1, 
+  n = nothing, 
+  acceleration = CPU1{Nothing}(nothing), 
+  acceleration_resampling = CPU1{Nothing}(nothing), 
+  check_measure = true, 
+  cache = true)

Bound the wrapped model to data:

mach = machine(tuned_forest, X, y)
untrained Machine; does not cache data
+  model: ProbabilisticTunedModel(model = ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), …)
+  args: 
+    1:	Source @097 ⏎ Table{AbstractVector{Continuous}}
+    2:	Source @328 ⏎ AbstractVector{Multiclass{3}}
+

Fitting the resultant machine optimizes the hyperparameters specified in range, using the specified tuning and resampling strategies and performance measure (possibly a vector of measures), and retrains on all data bound to the machine:

fit!(mach)
trained Machine; does not cache data
+  model: ProbabilisticTunedModel(model = ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), …)
+  args: 
+    1:	Source @097 ⏎ Table{AbstractVector{Continuous}}
+    2:	Source @328 ⏎ AbstractVector{Multiclass{3}}
+

Inspecting the optimal model:

F = fitted_params(mach)
(best_model = ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …),
+ best_fitted_params = (fitresult = WrappedEnsemble(atom = DecisionTreeClassifier(max_depth = -1, …), …),),)
F.best_model
ProbabilisticEnsembleModel(
+  model = DecisionTreeClassifier(
+        max_depth = -1, 
+        min_samples_leaf = 1, 
+        min_samples_split = 2, 
+        min_purity_increase = 0.0, 
+        n_subfeatures = 3, 
+        post_prune = false, 
+        merge_purity_threshold = 1.0, 
+        display_depth = 5, 
+        feature_importance = :impurity, 
+        rng = Random._GLOBAL_RNG()), 
+  atomic_weights = Float64[], 
+  bagging_fraction = 0.5, 
+  rng = Random._GLOBAL_RNG(), 
+  n = 300, 
+  acceleration = CPU1{Nothing}(nothing), 
+  out_of_bag_measure = Any[])

Inspecting details of tuning procedure:

r = report(mach);
+keys(r)
(:best_model, :best_history_entry, :history, :best_report, :plotting)
r.history[[1,end]]
2-element Vector{@NamedTuple{model::MLJEnsembles.ProbabilisticEnsembleModel{MLJDecisionTreeInterface.DecisionTreeClassifier}, measure::Vector{StatisticalMeasuresBase.RobustMeasure{StatisticalMeasuresBase.FussyMeasure{StatisticalMeasuresBase.RobustMeasure{StatisticalMeasures._BrierLossType}, typeof(StatisticalMeasures.l2_check)}}}, measurement::Vector{Float64}, per_fold::Vector{Vector{Float64}}}}:
+ (model = ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), measure = [BrierLoss()], measurement = [0.11081644444444422], per_fold = [[-0.0, -0.0, 0.15127288888888868, 0.15621777777777726, 0.14722044444444407, 0.21018755555555535]])
+ (model = ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), measure = [BrierLoss()], measurement = [0.10196651851851846], per_fold = [[0.008245333333333438, 0.00044000000000009364, 0.13900177777777775, 0.1467857777777774, 0.13812622222222204, 0.17919999999999997]])

Visualizing these results:

using Plots
+plot(mach)

Predicting on new data using the optimized model:

predict(mach, Xnew)
3-element UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:
+ UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)
+ UnivariateFinite{Multiclass{3}}(setosa=>0.85, versicolor=>0.137, virginica=>0.0133)
+ UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)

Constructing linear pipelines

Reference: Composing Models

Constructing a linear (unbranching) pipeline with a learned target transformation/inverse transformation:

X, y = @load_reduced_ames
+KNN = @load KNNRegressor
+knn_with_target = TransformedTargetModel(model=KNN(K=3), transformer=Standardizer())
+pipe = (X -> coerce(X, :age=>Continuous)) |> OneHotEncoder() |> knn_with_target
DeterministicPipeline(
+  f = Main.var"#15#16"(), 
+  one_hot_encoder = OneHotEncoder(
+        features = Symbol[], 
+        drop_last = false, 
+        ordered_factor = true, 
+        ignore = false), 
+  transformed_target_model_deterministic = TransformedTargetModelDeterministic(
+        model = KNNRegressor(K = 3, …), 
+        transformer = Standardizer(features = Symbol[], …), 
+        inverse = nothing, 
+        cache = true), 
+  cache = true)

Evaluating the pipeline (just as you would any other model):

pipe.one_hot_encoder.drop_last = true # mutate a nested hyper-parameter
+evaluate(pipe, X, y, resampling=Holdout(), measure=RootMeanSquaredError(), verbosity=2)
PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌────────────────────────┬───────────┬─────────────┬───────────┐
+│ measure                │ operation │ measurement │ per_fold  │
+├────────────────────────┼───────────┼─────────────┼───────────┤
+│ RootMeanSquaredError() │ predict   │ 51200.0     │ [51200.0] │
+└────────────────────────┴───────────┴─────────────┴───────────┘
+

Inspecting the learned parameters in a pipeline:

mach = machine(pipe, X, y) |> fit!
+F = fitted_params(mach)
+F.transformed_target_model_deterministic.model
(tree = NearestNeighbors.KDTree{StaticArraysCore.SVector{56, Float64}, Distances.Euclidean, Float64, StaticArraysCore.SVector{56, Float64}}
+  Number of points: 1456
+  Dimensions: 56
+  Metric: Distances.Euclidean(0.0)
+  Reordered: true,)

Constructing a linear (unbranching) pipeline with a static (unlearned) target transformation/inverse transformation:

Tree = @load DecisionTreeRegressor pkg=DecisionTree verbosity=0
+tree_with_target = TransformedTargetModel(model=Tree(),
+                                          transformer=y -> log.(y),
+                                          inverse = z -> exp.(z))
+pipe2 = (X -> coerce(X, :age=>Continuous)) |> OneHotEncoder() |> tree_with_target;
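
As a quick usage sketch (the resampling strategy and measure are illustrative choices, not prescribed by the manual), the new pipeline is evaluated like any other model:

evaluate(pipe2, X, y,
+         resampling=Holdout(fraction_train=0.8),
+         measure=RootMeanSquaredError())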

Creating a homogeneous ensemble of models

Reference: Homogeneous Ensembles

X, y = @load_iris
+Tree = @load DecisionTreeClassifier pkg=DecisionTree
+tree = Tree()
+forest = EnsembleModel(model=tree, bagging_fraction=0.8, n=300)
+mach = machine(forest, X, y)
+evaluate!(mach, measure=LogLoss())
PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────────────────┬───────────┬─────────────┬─────────┬────────────────────
+│ measure              │ operation │ measurement │ 1.96*SE │ per_fold          ⋯
+├──────────────────────┼───────────┼─────────────┼─────────┼────────────────────
+│ LogLoss(             │ predict   │ 0.421       │ 0.508   │ [3.89e-15, 3.89e- ⋯
+│   tol = 2.22045e-16) │           │             │         │                   ⋯
+└──────────────────────┴───────────┴─────────────┴─────────┴────────────────────
+                                                                1 column omitted
+

Performance curves

Generate a plot of performance as a function of some hyperparameter (building on the preceding example):

Single performance curve:

r = range(forest, :n, lower=1, upper=1000, scale=:log10)
+curve = learning_curve(mach,
+                       range=r,
+                       resampling=Holdout(),
+                       resolution=50,
+                       measure=LogLoss(),
+                       verbosity=0)
(parameter_name = "n",
+ parameter_scale = :log10,
+ parameter_values = [1, 2, 3, 4, 5, 6, 7, 8, 10, 11  …  281, 324, 373, 429, 494, 569, 655, 754, 869, 1000],
+ measurements = [15.218431430960575, 6.608003121338145, 6.586508049790735, 6.557716641074733, 6.564643919186257, 6.554665209414641, 2.761309309001218, 1.998324925450971, 1.1640157211960025, 1.1635074923549868  …  1.2421921856299438, 1.2328289465607303, 1.232660936494746, 1.2387429252643096, 1.2351081888595659, 1.2366288097323843, 1.239044879729414, 1.2448612762613058, 1.2431957394563597, 1.2466258022771786],)
using Plots
+plot(curve.parameter_values, curve.measurements, xlab=curve.parameter_name, xscale=curve.parameter_scale)

Multiple curves:

curve = learning_curve(mach,
+                       range=r,
+                       resampling=Holdout(),
+                       measure=LogLoss(),
+                       resolution=50,
+                       rng_name=:rng,
+                       rngs=4,
+                       verbosity=0)
(parameter_name = "n",
+ parameter_scale = :log10,
+ parameter_values = [1, 2, 3, 4, 5, 6, 7, 8, 10, 11  …  281, 324, 373, 429, 494, 569, 655, 754, 869, 1000],
+ measurements = [4.004850376568572 8.009700753137146 16.820371581588002 8.009700753137146; 4.004850376568572 8.040507294495367 9.087929700674836 8.009700753137146; … ; 1.2032095186799414 1.231410971269136 1.2618260081921822 1.2771759492571848; 1.208361023670845 1.2299991814751527 1.264384762090153 1.278189281728243],)
plot(curve.parameter_values, curve.measurements,
+xlab=curve.parameter_name, xscale=curve.parameter_scale)

diff --git a/v0.20.3/composing_models/index.html b/v0.20.3/composing_models/index.html new file mode 100644 index 000000000..14720ee30 --- /dev/null +++ b/v0.20.3/composing_models/index.html @@ -0,0 +1,2 @@ + +Composing Models · MLJ

Composing Models

Three common ways of combining multiple models together have out-of-the-box implementations in MLJ:

  • Linear Pipelines (Pipeline) - for unbranching chains that take the output of one model (e.g., dimension reduction, such as PCA) and make it the input of the next model in the chain (e.g., a classification model, such as EvoTreeClassifier). To include transformations of the target variable in a supervised pipeline model, see Target Transformations.
  • Homogeneous Ensembles (EnsembleModel) - for blending the predictions of multiple supervised models all of the same type, but which receive different views of the training data to reduce overall variance. The technique implemented here is known as observation bagging.
  • Model Stacking - (Stack) for combining the predictions of a smaller number of models of possibly different types, with the help of an adjudicating model.

Additionally, more complicated model compositions are possible using:

  • Learning Networks - "blueprints" for combining models in flexible ways; these are simple transformations of your existing workflows which can be "exported" to define new, stand-alone model types.
diff --git a/v0.20.3/controlling_iterative_models/index.html b/v0.20.3/controlling_iterative_models/index.html new file mode 100644 index 000000000..88f2c4310 --- /dev/null +++ b/v0.20.3/controlling_iterative_models/index.html @@ -0,0 +1,166 @@ + +Controlling Iterative Models · MLJ

Controlling Iterative Models

Iterative supervised machine learning models are usually trained until an out-of-sample estimate of the performance satisfies some stopping criterion, such as k consecutive deteriorations of the performance (see Patience below). A more sophisticated kind of control might dynamically mutate parameters, such as a learning rate, in response to the behavior of these estimates.

Some iterative model implementations enable some form of automated control, with the method and options for doing so varying from model to model. But sometimes it is up to the user to arrange control, which in the crudest case reduces to manually experimenting with the iteration parameter.

In response to this ad hoc state of affairs, MLJ provides a uniform and feature-rich interface for controlling any iterative model that exposes its iteration parameter as a hyper-parameter, and which implements the "warm restart" behavior described in Machines.

Basic use

As in Tuning Models, iteration control in MLJ is implemented as a model wrapper, which allows composition with other meta-algorithms. Ordinarily, the wrapped model behaves just like the original model, but with the training occurring on a subset of the provided data (to allow computation of an out-of-sample loss) and with the iteration parameter automatically determined by the controls specified in the wrapper.

By setting retrain=true one can ask that the wrapped model retrain on all supplied data, after learning the appropriate number of iterations from the controlled training phase:

using MLJ
+
+X, y = make_moons(100, rng=123, noise=0.5)
+EvoTreeClassifier = @load EvoTreeClassifier verbosity=0
+
+iterated_model = IteratedModel(model=EvoTreeClassifier(rng=123, eta=0.005),
+                               resampling=Holdout(),
+                               measures=log_loss,
+                               controls=[Step(5),
+                                         Patience(2),
+                                         NumberLimit(100)],
+                               retrain=true)
+
+mach = machine(iterated_model, X, y)
julia> fit!(mach)┌ Info: Training machine(ProbabilisticIteratedModel(model = EvoTrees.EvoTreeClassifier{EvoTrees.MLogLoss}
+│  - nrounds: 100
+│  - L2: 0.0
+│  - lambda: 0.0
+│  - gamma: 0.0
+│  - eta: 0.005
+│  - max_depth: 6
+│  - min_weight: 1.0
+│  - rowsample: 1.0
+│  - colsample: 1.0
+│  - nbins: 64
+│  - alpha: 0.5
+│  - tree_type: binary
+│  - rng: Random.MersenneTwister(123)
+└ , …), …).
+[ Info: No iteration parameter specified. Using `iteration_parameter=:(nrounds)`. 
+[ Info: final loss: 0.46683584745719836
+[ Info: Stop triggered by Patience(2) stopping criterion. 
+[ Info: Retraining on all provided data. To suppress, specify `retrain=false`. 
+[ Info: Total of 215 iterations. 
+trained Machine; does not cache data
+  model: ProbabilisticIteratedModel(model = EvoTrees.EvoTreeClassifier{EvoTrees.MLogLoss}
+ - nrounds: 100
+ - L2: 0.0
+ - lambda: 0.0
+ - gamma: 0.0
+ - eta: 0.005
+ - max_depth: 6
+ - min_weight: 1.0
+ - rowsample: 1.0
+ - colsample: 1.0
+ - nbins: 64
+ - alpha: 0.5
+ - tree_type: binary
+ - rng: Random.MersenneTwister(123)
+, …)
+  args:
+    1:	Source @526 ⏎ Table{AbstractVector{Continuous}}
+    2:	Source @507 ⏎ AbstractVector{Multiclass{2}}

As detailed under IteratedModel below, the specified controls are repeatedly applied in sequence to a training machine, constructed under the hood, until one of the controls triggers a stop. Here Step(5) means "Compute 5 more iterations" (in this case starting from none); Patience(2) means "Stop at the end of the control cycle if there have been 2 consecutive drops in the log loss"; and NumberLimit(100) is a safeguard ensuring a stop after 100 control cycles (500 iterations). See Controls provided below for a complete list.

Because iteration is implemented as a wrapper, the "self-iterating" model can be evaluated using cross-validation, say, and the number of iterations on each fold will generally be different:

e = evaluate!(mach, resampling=CV(nfolds=3), measure=log_loss, verbosity=0);
+map(e.report_per_fold) do r
+    r.n_iterations
+end
3-element Vector{Int64}:
+ 340
+ 150
+ 500

Alternatively, one might wrap the self-iterating model in a tuning strategy, using TunedModel; see Tuning Models. In this way, the optimization of some other hyper-parameter is realized simultaneously with that of the iteration parameter, which will frequently be more efficient than a direct two-parameter search.
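
For example, one could tune the tree depth of the atomic EvoTrees model while the wrapper continues to control nrounds. The following is a rough sketch only; the choice of max_depth and the tuning settings are illustrative:

r = range(iterated_model, :(model.max_depth), lower=2, upper=6)
+tuned_iterated_model = TunedModel(model=iterated_model,
+                                  tuning=Grid(),
+                                  resampling=CV(nfolds=3),
+                                  range=r,
+                                  measure=log_loss)
+tuned_mach = machine(tuned_iterated_model, X, y) |> fit!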

Controls provided

In the table below, mach is the training machine being iterated, constructed by binding the supplied data to the model specified in the IteratedModel wrapper, but trained in each iteration on a subset of the data, according to the value of the resampling hyper-parameter of the wrapper (using all data if resampling=nothing).

control | description | can trigger a stop
Step(n=1) | Train model for n more iterations | no
TimeLimit(t=0.5) | Stop after t hours | yes
NumberLimit(n=100) | Stop after n applications of the control | yes
NumberSinceBest(n=6) | Stop when best loss occurred n control applications ago | yes
InvalidValue() | Stop when NaN, Inf or -Inf loss/training loss encountered | yes
Threshold(value=0.0) | Stop when loss < value | yes
GL(alpha=2.0)† | Stop after the "generalization loss (GL)" exceeds alpha | yes
PQ(alpha=0.75, k=5)† | Stop after "progress-modified GL" exceeds alpha | yes
Patience(n=5)† | Stop after n consecutive loss increases | yes
Warmup(c; n=1) | Wait for n loss updates before checking criteria c | no
Info(f=identity) | Log to Info the value of f(mach), where mach is the current machine | no
Warn(predicate; f="") | Log to Warn the value of f or f(mach), if predicate(mach) holds | no
Error(predicate; f="") | Log to Error the value of f or f(mach), if predicate(mach) holds, and then stop | yes
Callback(f=mach->nothing) | Call f(mach) | yes
WithNumberDo(f=n->@info(n)) | Call f(n + 1), where n is the number of complete control cycles so far | yes
WithIterationsDo(f=i->@info("iterations: $i")) | Call f(i), where i is the total number of iterations | yes
WithLossDo(f=x->@info("loss: $x")) | Call f(loss), where loss is the current loss | yes
WithTrainingLossesDo(f=v->@info(v)) | Call f(v), where v is the current batch of training losses | yes
WithEvaluationDo(f=e->@info("evaluation: $e")) | Call f(e), where e is the current performance evaluation object | yes
WithFittedParamsDo(f=fp->@info("fitted_params: $fp")) | Call f(fp), where fp is the fitted parameters of the training machine | yes
WithReportDo(f=r->@info("report: $r")) | Call f(r), where r is the training machine report | yes
WithModelDo(f=m->@info("model: $m")) | Call f(m), where m is the model, which may be mutated by f | yes
WithMachineDo(f=mach->@info("machine: $mach")) | Call f(mach), where mach is the training machine in its current state | yes
Save(filename="machine.jls") | Save the current training machine to machine1.jls, machine2.jls, etc. | yes

Table 1. Atomic controls. Some advanced options are omitted.

† For more on these controls see Prechelt, Lutz (1998): "Early Stopping - But When?", in Neural Networks: Tricks of the Trade, ed. G. Orr, Springer.

Stopping option. All the following controls trigger a stop if the provided function f returns true and stop_if_true=true is specified in the constructor: Callback, WithNumberDo, WithLossDo, WithTrainingLossesDo.
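
For example, the following sketch (the threshold value is illustrative) triggers a stop as soon as the out-of-sample loss drops below 0.1 - the same effect as Threshold(0.1):

WithLossDo(f=loss -> loss < 0.1,
+           stop_if_true=true,
+           stop_message="loss dropped below 0.1")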

There are also control wrappers to modify a control's behavior:

wrapper | description
IterationControl.skip(control, predicate=1) | Apply control every predicate applications of the control wrapper (can also be a function; see doc-string)
IterationControl.louder(control, by=1) | Increase the verbosity level of control by the specified value (negative values lower verbosity)
IterationControl.with_state_do(control; f=...) | Apply control and call f(x), where x is the internal state of control; useful for debugging. Default f logs state to Info. Warning: internal control state is not yet part of the public API.
IterationControl.composite(controls...) | Apply each control in controls in sequence; used internally by IterationControl.jl

Table 2. Wrapped controls

Using training losses, and controlling model tuning

Some iterative models report a training loss as a byproduct of a fit! call, and these can be used in two ways:

  1. To supplement an out-of-sample estimate of the loss in deciding when to stop, as in the PQ stopping criterion (see Prechelt, Lutz (1998)); or

  2. As a (generally less reliable) substitute for an out-of-sample loss, when wishing to train exclusively on all supplied data.

To have IteratedModel bind all data to the training machine and use training losses in place of an out-of-sample loss, specify resampling=nothing. To check whether MyFavoriteIterativeModel reports training losses, load the model code and inspect supports_training_losses(MyFavoriteIterativeModel) (or do info("MyFavoriteIterativeModel")).
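
Here is a rough sketch, reusing the EvoTrees classifier from Basic use and assuming it does report training losses; the particular controls are illustrative only:

iterated_model = IteratedModel(model=EvoTreeClassifier(rng=123, eta=0.005),
+                               resampling=nothing,  # use training losses
+                               controls=[Step(5),
+                                         NumberSinceBest(10),
+                                         NumberLimit(100)])
+mach = machine(iterated_model, X, y) |> fit!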

Controlling model tuning

An example of scenario 2 occurs when controlling hyperparameter optimization (model tuning). Recall that MLJ's TunedModel wrapper is implemented as an iterative model. Moreover, this wrapper reports, as a training loss, the lowest value of the optimization objective function so far (typically the lowest value of an out-of-sample loss, or -1 times an out-of-sample score). One may want to simply end the hyperparameter search when this value meets the NumberSinceBest stopping criterion discussed below, say, rather than introducing an extra layer of resampling to first "learn" the optimal value of the iteration parameter.

In the following example, we conduct a RandomSearch for the optimal value of the regularization parameter lambda in a RidgeRegressor using 6-fold cross-validation. By wrapping our "self-tuning" version of the regressor as an IteratedModel, with resampling=nothing and NumberSinceBest(20) in the controls, we terminate the search when the number of lambda values tested since the previous best cross-validation loss reaches 20.

using MLJ
+
+X, y = @load_boston;
+RidgeRegressor = @load RidgeRegressor pkg=MLJLinearModels verbosity=0
+model = RidgeRegressor()
+r = range(model, :lambda, lower=-1, upper=2, scale=x->10^x)
+self_tuning_model = TunedModel(model=model,
+                               tuning=RandomSearch(rng=123),
+                               resampling=CV(nfolds=6),
+                               range=r,
+                               measure=mae);
+iterated_model = IteratedModel(model=self_tuning_model,
+                               resampling=nothing,
+                               control=[Step(1), NumberSinceBest(20), NumberLimit(1000)])
+mach = machine(iterated_model, X, y)
julia> fit!(mach)[ Info: Training machine(DeterministicIteratedModel(model = DeterministicTunedModel(model = RidgeRegressor(lambda = 1.0, …), …), …), …).
+[ Info: No iteration parameter specified. Using `iteration_parameter=:(n)`. 
+[ Info: final loss: 3.8928800658727467
+[ Info: final training loss: 3.8928800658727467
+[ Info: Stop triggered by NumberSinceBest(20) stopping criterion. 
+[ Info: Total of 45 iterations. 
+trained Machine; does not cache data
+  model: DeterministicIteratedModel(model = DeterministicTunedModel(model = RidgeRegressor(lambda = 1.0, …), …), …)
+  args:
+    1:	Source @735 ⏎ Table{AbstractVector{Continuous}}
+    2:	Source @860 ⏎ AbstractVector{Continuous}
julia> report(mach).model_report.best_modelRidgeRegressor(
+  lambda = 0.4243170708090101,
+  fit_intercept = true,
+  penalize_intercept = false,
+  scale_penalty_with_samples = true,
+  solver = nothing)

We can use mach here to directly obtain predictions using the optimal model (trained on all data), as in

julia> predict(mach, selectrows(X, 1:4))4-element Vector{Float64}:
+ 31.309570596541448
+ 25.24911135120517
+ 29.89525728277618
+ 29.237112147518744

Custom controls

Under the hood, control in MLJIteration is implemented using IterationControl.jl. Rather than iterating a training machine directly, we iterate a wrapped version of this object, which includes other information that a control may want to access, such as the MLJ evaluation object. This information is summarized under The training machine wrapper below.

Controls must implement two update! methods, one for initializing the control's state on the first application of the control (this state being external to the control struct) and one for all subsequent control applications, which generally updates the state as well. There are two optional methods: done, for specifying conditions triggering a stop, and takedown for specifying actions to perform at the end of controlled training.

We summarize the training algorithm, as it relates to controls, after giving a simple example.

Example 1 - Non-uniform iteration steps

Below we define a control, IterateFromList(list), to train, on each application of the control, until the iteration count reaches the next value in a user-specified list, triggering a stop when the list is exhausted. For example, to train on iteration counts on a log scale, one might use IterateFromList([round(Int, 10^x) for x in range(1, 2, length=10)]).

In the code, wrapper is an object that wraps the training machine (see above). The variable n is a counter for control cycles (unused in this example).


+import IterationControl # or MLJ.IterationControl
+
+struct IterateFromList
+    list::Vector{<:Int} # list of iteration parameter values
+    IterateFromList(v) = new(unique(sort(v)))
+end
+
+function IterationControl.update!(control::IterateFromList, wrapper, verbosity, n)
+    Δi = control.list[1]
+    verbosity > 1 && @info "Training $Δi more iterations. "
+    MLJIteration.train!(wrapper, Δi) # trains the training machine
+    return (index = 2, )
+end
+
+function IterationControl.update!(control::IterateFromList, wrapper, verbosity, n, state)
+    index = state.index
+    Δi = control.list[index] - wrapper.n_iterations
+    verbosity > 1 && @info "Training $Δi more iterations. "
+    MLJIteration.train!(wrapper, Δi)
+    return (index = index + 1, )
+end

The first update method will be called the first time the control is applied, returning an initialized state = (index = 2,), which is passed to the second update method, which is called on subsequent control applications (and which returns the updated state).

A done method articulates the criterion for stopping:

IterationControl.done(control::IterateFromList, state) =
+    state.index > length(control.list)

For the sake of illustration, we'll implement a takedown method; its return value is included in the IteratedModel report:

function IterationControl.takedown(control::IterateFromList, verbosity, state)
+    verbosity > 1 && @info "Stepped through these values of the "*
+                              "iteration parameter: $(control.list)"
+    return (iteration_values=control.list, )
+end

The training machine wrapper

A training machine wrapper has these properties:

  • wrapper.machine - the training machine, type Machine

  • wrapper.model - the mutable atomic model, coinciding with wrapper.machine.model

  • wrapper.n_cycles - the number of IterationControl.train!(wrapper, _) calls so far; generally the current control cycle count

  • wrapper.n_iterations - the total number of iterations applied to the model so far

  • wrapper.Δiterations - the number of iterations applied in the last IterationControl.train!(wrapper, _) call

  • wrapper.loss - the out-of-sample loss (based on the first measure in measures)

  • wrapper.training_losses - the last batch of training losses (if reported by model), an abstract vector of length wrapper.Δiterations.

  • wrapper.evaluation - the complete MLJ performance evaluation object, which has the following properties: measure, measurement, per_fold, per_observation, fitted_params_per_fold, report_per_fold (here there is only one fold). For further details, see Evaluating Model Performance.

The training algorithm

Here now is a simplified description of the training of an IteratedModel. First, the atomic model is bound in a machine - the training machine above - to a subset of the supplied data, and then wrapped in an object called wrapper below. To train the training machine for i more iterations, and update the other data in the wrapper, requires the call MLJIteration.train!(wrapper, i). Only controls can make this call (e.g., Step(...), or IterateFromList(...) above). If we assume for simplicity there is only a single control, called control, then training proceeds as follows:

n = 1 # initialize control cycle counter
+state = update!(control, wrapper, verbosity, n)
+finished = done(control, state)
+
+# subsequent training events:
+while !finished
+    n += 1
+    state = update!(control, wrapper, verbosity, n, state)
+    finished = done(control, state)
+end
+
+# finalization:
+return takedown(control, verbosity, state)

Example 2 - Cyclic learning rates

The control below implements a triangular cyclic learning rate policy, close to that described in L. N. Smith (2017): "Cyclical Learning Rates for Training Neural Networks," 2017 IEEE Winter Conference on Applications of Computer Vision (WACV), Santa Rosa, CA, USA, pp. 464-472. [In that paper learning rates are mutated (slowly) during each training iteration (epoch) while here mutations can occur once per iteration of the model, at most.]

For the sake of illustration, we suppose the iterative model, model, specified in the IteratedModel constructor, has a field called :learning_parameter, and that mutating this parameter does not trigger cold-restarts.

struct CycleLearningRate{F<:AbstractFloat}
+    stepsize::Int
+    lower::F
+    upper::F
+end
+
+# return one cycle of learning rate values:
+function one_cycle(c::CycleLearningRate)
+    rise = range(c.lower, c.upper, length=c.stepsize + 1)
+    fall = reverse(rise)
+    return vcat(rise[1:end - 1], fall[1:end - 1])
+end
+
+function IterationControl.update!(control::CycleLearningRate,
+                                  wrapper,
+                                  verbosity,
+                                  n,
+                                  state = (learning_rates=nothing, ))
+    rates = n == 0 ? one_cycle(control) : state.learning_rates
+    index = mod(n, length(rates)) + 1
+    r = rates[index]
+    verbosity > 1 && @info "learning rate: $r"
+    wrapper.model.learning_parameter = r
+    return (learning_rates = rates,)
+end

API Reference

MLJIteration.IteratedModelFunction
IteratedModel(model=nothing,
+              controls=Any[Step(1), Patience(5), GL(2.0), TimeLimit(Dates.Millisecond(108000)), InvalidValue()],
+              retrain=false,
+              resampling=Holdout(),
+              measure=nothing,
+              weights=nothing,
+              class_weights=nothing,
+              operation=predict,
+              verbosity=1,
+              check_measure=true,
+              iteration_parameter=nothing,
+              cache=true)

Wrap the specified model <: Supervised in the specified iteration controls. Training a machine bound to the wrapper iterates a corresponding machine bound to model. Here model should support iteration.

To list all controls, do MLJIteration.CONTROLS. Controls are summarized at https://alan-turing-institute.github.io/MLJ.jl/dev/getting_started/ but query individual doc-strings for details and advanced options. For creating your own controls, refer to the documentation just cited.

To make out-of-sample losses available to the controls, the machine bound to model is only trained on part of the data, as iteration proceeds. See details on training below. Specify retrain=true to ensure the model is retrained on all available data, using the same number of iterations, once controlled iteration has stopped.

Specify resampling=nothing if all data is to be used for controlled iteration, with each out-of-sample loss replaced by the most recent training loss, assuming this is made available by the model (supports_training_losses(model) == true). Otherwise, resampling must have type Holdout (eg, Holdout(fraction_train=0.8, rng=123)).

Assuming retrain=true or resampling=nothing, iterated_model behaves exactly like the original model but with the iteration parameter automatically selected. If retrain=false (default) and resampling is not nothing, then iterated_model behaves like the original model trained on a subset of the provided data.

Controlled iteration can be continued with new fit! calls (warm restart) by mutating a control, or by mutating the iteration parameter of model, which is otherwise ignored.

Training

Given an instance iterated_model of IteratedModel, calling fit!(mach) on a machine mach = machine(iterated_model, data...) performs the following actions:

  • Assuming resampling !== nothing, the data is split into train and test sets, according to the specified resampling strategy, which must have type Holdout.

  • A clone of the wrapped model, iterated_model.model, is bound to the train data in an internal machine, train_mach. If resampling === nothing, all data is used instead. This machine is the object to which controls are applied. For example, Callback(mach -> print(fitted_params(mach))) will print the value of fitted_params(train_mach).

  • The iteration parameter of the clone is set to 0.

  • The specified controls are repeatedly applied to train_mach in sequence, until one of the controls triggers a stop. Loss-based controls (eg, Patience(), GL(), Threshold(0.001)) use an out-of-sample loss, obtained by applying measure to predictions and the test target values. (Specifically, these predictions are those returned by operation(train_mach).) If resampling === nothing then the most recent training loss is used instead. Some controls require both out-of-sample and training losses (eg, PQ()).

  • Once a stop has been triggered, a clone of model is bound to all data in a machine called mach_production below, unless retrain == false or resampling === nothing, in which case mach_production coincides with train_mach.

Prediction

Calling predict(mach, Xnew) returns predict(mach_production, Xnew). Similar statements hold for predict_mean, predict_mode, predict_median.

Controls

A control is permitted to mutate the fields (hyper-parameters) of train_mach.model (the clone of model). For example, to mutate a learning rate one might use the control

Callback(mach -> mach.model.eta = 1.05*mach.model.eta)

However, unless model supports warm restarts with respect to changes in that parameter, this will trigger retraining of train_mach from scratch, with a different training outcome, which is not recommended.

Warm restarts

If iterated_model is mutated and fit!(mach) is called again, then a warm restart is attempted if the only parameters to change are model or controls or both.

Specifically, train_mach.model is mutated to match the current value of iterated_model.model and the iteration parameter of the latter is updated to the last value used in the preceding fit!(mach) call. Then repeated application of the (updated) controls begin anew.
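
For example, assuming iterated_model and mach are as in the Training section above, the following sketch (control values illustrative) relaxes the stopping criteria and resumes training from the current iteration count:

iterated_model.controls = [Step(1), NumberSinceBest(30), NumberLimit(2000)]
+fit!(mach)  # warm restart: iteration resumes rather than restarting from scratch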

source

Controls

IterationControl.StepType
Step(; n=1)

An iteration control, as in, Step(2).

Train for n more iterations. Will never trigger a stop.

source
EarlyStopping.TimeLimitType
TimeLimit(; t=0.5)

An early stopping criterion for loss-reporting iterative algorithms.

Stopping is triggered after t hours have elapsed since the stopping criterion was initiated.

Any Julia built-in Real type can be used for t. Subtypes of Period may also be used, as in TimeLimit(t=Minute(30)).

Internally, t is rounded to the nearest millisecond.

source
EarlyStopping.NumberLimitType
NumberLimit(; n=100)

An early stopping criterion for loss-reporting iterative algorithms.

A stop is triggered by n consecutive loss updates, excluding "training" loss updates.

If wrapped in a stopper::EarlyStopper, this is the number of calls to done!(stopper).

source
EarlyStopping.NumberSinceBestType
NumberSinceBest(; n=6)

An early stopping criterion for loss-reporting iterative algorithms.

A stop is triggered when the number of calls to the control, since the lowest value of the loss so far, is n.

For a customizable loss-based stopping criterion, use WithLossDo or WithTrainingLossesDo with the stop_if_true=true option.

source
EarlyStopping.InvalidValueType
InvalidValue()

An early stopping criterion for loss-reporting iterative algorithms.

Stop if a loss (or training loss) is NaN, Inf or -Inf (or, more precisely, if isnan(loss) or isinf(loss) is true).

For a customizable loss-based stopping criterion, use WithLossDo or WithTrainingLossesDo with the stop_if_true=true option.

source
EarlyStopping.ThresholdType
Threshold(; value=0.0)

An early stopping criterion for loss-reporting iterative algorithms.

A stop is triggered as soon as the loss drops below value.

For a customizable loss-based stopping criterion, use WithLossDo or WithTrainingLossesDo with the stop_if_true=true option.

source
EarlyStopping.GLType
GL(; alpha=2.0)

An early stopping criterion for loss-reporting iterative algorithms.

A stop is triggered when the (rescaled) generalization loss exceeds the threshold alpha.

Terminology. Suppose $E_1, E_2, ..., E_t$ are a sequence of losses, for example, out-of-sample estimates of the loss associated with some iterative machine learning algorithm. Then the generalization loss at time t, is given by

$GL_t = 100 \frac{E_t - E_{opt}}{|E_{opt}|}$

where $E_{opt}$ is the minimum value of the sequence.

Reference: Prechelt, Lutz (1998): "Early Stopping- But When?", in Neural Networks: Tricks of the Trade, ed. G. Orr, Springer..

source
EarlyStopping.PQType
PQ(; alpha=0.75, k=5, tol=eps(Float64))

A stopping criterion for training iterative supervised learners.

A stop is triggered when Prechelt's progress-modified generalization loss exceeds the threshold $PQ_T > alpha$, or if the training progress drops below $P_j ≤ tol$. Here k is the number of training (in-sample) losses used to estimate the training progress.

Context and explanation of terminology

The training progress at time $j$ is defined by

$P_j = 1000 |M - m|/|m|$

where $M$ is the mean of the last k training losses $F_1, F_2, …, F_k$ and $m$ is the minimum value of those losses.

The progress-modified generalization loss at time $t$ is then given by

$PQ_t = GL_t / P_t$

where $GL_t$ is the generalization loss at time $t$; see GL.

PQ will stop when the following are true:

  1. At least k training samples have been collected via done!(c::PQ, loss; training = true) or update_training(c::PQ, loss, state)
  2. The last update was an out-of-sample update. (done!(::PQ, loss; training=true) is always false)
  3. The progress-modified generalization loss exceeds the threshold $PQ_t > alpha$ OR the training progress stalls $P_j ≤ tol$.

Reference: Prechelt, Lutz (1998): "Early Stopping- But When?", in Neural Networks: Tricks of the Trade, ed. G. Orr, Springer..

source
IterationControl.InfoType
Info(f=identity)

An iteration control, as in, Info(my_loss_function).

Log to Info the value of f(m), where m is the object being iterated. If IterationControl.expose(m) has been overloaded, then log f(expose(m)) instead.

Can be suppressed by setting the global verbosity level sufficiently low.

See also Warn, Error.

source
IterationControl.WarnType
Warn(predicate; f="")

An iteration control, as in, Warn(m -> length(m.cache) > 100, f="Memory low").

If predicate(m) is true, then log to Warn the value of f (or f(IterationControl.expose(m)) if f is a function). Here m is the object being iterated.

Can be suppressed by setting the global verbosity level sufficiently low.

See also Info, Error.

source
IterationControl.ErrorType
Error(predicate; f="", exception=nothing))

An iteration control, as in, Error(m -> isnan(m.bias), f="Bias overflow!").

If predicate(m) is true, then log at the Error level the value of f (or f(IterationControl.expose(m)) if f is a function) and stop iteration at the end of the current control cycle. Here m is the object being iterated.

Specify exception=... to throw an immediate exception, without waiting until the end of the control cycle.

See also Info, Warn.

source
IterationControl.CallbackType
Callback(f=_->nothing, stop_if_true=false, stop_message=nothing, raw=false)

An iteration control, as in, Callback(m->put!(v, my_loss_function(m))).

Call f(IterationControl.expose(m)), where m is the object being iterated, unless raw=true, in which case call f(m) (guaranteed if expose has not been overloaded.) If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
IterationControl.WithNumberDoType
WithNumberDo(f=n->@info("number: $n"), stop_if_true=false, stop_message=nothing)

An iteration control, as in, WithNumberDo(n->put!(my_channel, n)).

Call f(n + 1), where n is the number of complete control cycles of the control (so, n = 1, 2, 3, ..., unless the control is wrapped in IterationControl.skip).

If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
MLJIteration.WithIterationsDoType
WithIterationsDo(f=x->@info("iterations: $x"), stop_if_true=false, stop_message=nothing)

An iteration control, as in, WithIterationsDo(x->put!(my_channel, x)).

Call f(x), where x is the current number of model iterations (generally more than the number of control cycles). If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
IterationControl.WithLossDoType
WithLossDo(f=x->@info("loss: $x"), stop_if_true=false, stop_message=nothing)

An iteration control, as in, WithLossDo(x->put!(my_losses, x)).

Call f(loss), where loss is current loss.

If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
IterationControl.WithTrainingLossesDoType
WithTrainingLossesDo(f=v->@info("training: $v"), stop_if_true=false, stop_message=nothing)

An iteration control, as in, WithTrainingLossesDo(v->put!(my_losses, last(v))).

Call f(training_losses), where training_losses is the most recent batch of training losses.

If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
MLJIteration.WithEvaluationDoType
WithEvaluationDo(f=x->@info("evaluation: $x"), stop_if_true=false, stop_message=nothing)

An iteration control, as in, WithEvaluationDo(x->put!(my_channel, x)).

Call f(x), where x is the latest performance evaluation, as returned by evaluate!(train_mach, resampling=..., ...). Not valid if resampling=nothing. If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
MLJIteration.WithFittedParamsDoType
WithFittedParamsDo(f=x->@info("fitted_params: $x"), stop_if_true=false, stop_message=nothing)

An iteration control, as in, WithFittedParamsDo(x->put!(my_channel, x)).

Call f(x), where x = fitted_params(mach) is the fitted parameters of the training machine, mach, in its current state. If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
MLJIteration.WithReportDoType
WithReportDo(f=x->@info("report: $x"), stop_if_true=false, stop_message=nothing)

An iteration control, as in, WithReportDo(x->put!(my_channel, x)).

Call f(x), where x = report(mach) is the report associated with the training machine, mach, in its current state. If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
MLJIteration.WithModelDoType
WithModelDo(f=x->@info("model: $x"), stop_if_true=false, stop_message=nothing)

An iteration control, as in, WithModelDo(x->put!(my_channel, x)).

Call f(x), where x is the model associated with the training machine; f may mutate x, as in f(x) = (x.learning_rate *= 0.9). If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
MLJIteration.WithMachineDoType
WithMachineDo(f=x->@info("machine: $x"), stop_if_true=false, stop_message=nothing)

An iteration control, as in, WithMachineDo(x->put!(my_channel, x)).

Call f(x), where x is the training machine in its current state. If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified.

source
MLJIteration.SaveType
Save(filename="machine.jls")

An iteration control, as in, Save("run3/machine.jls").

Save the current state of the machine being iterated to disk, using the provided filename, decorated with a number, as in "run3/machine42.jls". The default behaviour uses the Serialization module but this can be changed by setting the method=save_fn(::String, ::Any) argument where save_fn is any serialization method. For more on what is meant by "the machine being iterated", see IteratedModel.

source

Control wrappers

IterationControl.skipFunction
IterationControl.skip(control, predicate=1)

An iteration control wrapper.

If predicate is an integer, k: Apply control only on every kth call of the wrapper, starting with the kth call.

If predicate is a function: Apply control as usual when predicate(n + 1) is true but otherwise skip. Here n is the number of control cycles applied so far.

source
IterationControl.louderFunction
IterationControl.louder(control, by=1)

Wrap control to make it more (or less) verbose. The same as control, but as if the global verbosity were increased by the value by.

source
IterationControl.with_state_doFunction
IterationControl.with_state_do(control,
+                              f=x->@info "$(typeof(control)) state: $x")

Wrap control to give access to its internal state. Acts exactly like control except that f is called on the internal state of control. If f is not specified, the control type and state are logged to Info at every update (useful for debugging new controls).

Warning. The internal state of a control is not yet considered part of the public interface and could change in any pre-1.0 release of IterationControl.jl.

source
diff --git a/v0.20.3/correcting_class_imbalance/index.html b/v0.20.3/correcting_class_imbalance/index.html new file mode 100644 index 000000000..a18ce6a5d --- /dev/null +++ b/v0.20.3/correcting_class_imbalance/index.html @@ -0,0 +1,23 @@ + +Correcting Class Imbalance · MLJ

Correcting Class Imbalance

Oversampling and undersampling methods

Models providing oversampling or undersampling methods, to correct for class imbalance, are listed under Class Imbalance. In particular, several popular algorithms are provided by the Imbalance.jl package, which includes detailed documentation and tutorials.

Incorporating class imbalance in supervised learning pipelines

One or more oversampling/undersampling algorithms can be fused with an MLJ classifier using the BalancedModel wrapper. This creates a new classifier which can be treated like any other; resampling to correct for class imbalance, relevant only for training of the atomic classifier, is then carried out internally. If, for example, one applies cross-validation to the wrapped classifier (using evaluate!, say), then over/undersampling is automatically repeated for each training fold.
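
For example, in the rough sketch below (the data, balancer, classifier, and measure are illustrative placeholders; the full construction pattern is spelled out in the doc-string further down), cross-validating the wrapped classifier re-applies the oversampling inside every training fold:

using MLJ, MLJBalancing, Imbalance
+X, y = Imbalance.generate_imbalanced_data(500, 3; class_probs=[0.2, 0.8])
+SMOTE = @load SMOTE pkg=Imbalance verbosity=0
+LogisticClassifier = @load LogisticClassifier pkg=MLJLinearModels verbosity=0
+balanced = BalancedModel(model=LogisticClassifier(), balancer1=SMOTE(rng=42))
+evaluate(balanced, X, y, resampling=CV(nfolds=5), measure=log_loss)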

Refer to the MLJBalancing.jl documentation for further details.

MLJBalancing.BalancedModelFunction
BalancedModel(; model=nothing, balancer1=balancer_model1, balancer2=balancer_model2, ...)
+BalancedModel(model;  balancer1=balancer_model1, balancer2=balancer_model2, ...)

Given a classification model, and one or more balancer models that all implement the MLJModelInterface, BalancedModel wraps an arbitrary number of balancing models and a classifier together in a sequential pipeline.

Operation

  • During training, data is first passed to balancer1 and the result is passed to balancer2, and so on; the result from the final balancer is then passed to the classifier for training.
  • During prediction, the balancers have no effect.

Arguments

  • model::Supervised: A classification model that implements the MLJModelInterface.
  • balancer1::Static=...: The first balancer model to pass the data to. This keyword argument can have any name.
  • balancer2::Static=...: The second balancer model to pass the data to. This keyword argument can have any name.
  • and so on for an arbitrary number of balancers.

Returns

  • An instance of type ProbabilisticBalancedModel or DeterministicBalancedModel, depending on the prediction type of model.

Example

using MLJ
+using Imbalance
+
+# generate data
+X, y = Imbalance.generate_imbalanced_data(1000, 5; class_probs=[0.2, 0.3, 0.5])
+
+# prepare classification and balancing models
+SMOTENC = @load SMOTENC pkg=Imbalance verbosity=0
+TomekUndersampler = @load TomekUndersampler pkg=Imbalance verbosity=0
+LogisticClassifier = @load LogisticClassifier pkg=MLJLinearModels verbosity=0
+
+oversampler = SMOTENC(k=5, ratios=1.0, rng=42)
+undersampler = TomekUndersampler(min_ratios=0.5, rng=42)
+logistic_model = LogisticClassifier()
+
+# wrap them in a BalancedModel
+balanced_model = BalancedModel(model=logistic_model, balancer1=oversampler, balancer2=undersampler)
+
+# now this behaves as a unified model that can be trained, validated, fine-tuned, etc.
+mach = machine(balanced_model, X, y)
+fit!(mach)
source
diff --git a/v0.20.3/evaluating_model_performance/index.html b/v0.20.3/evaluating_model_performance/index.html new file mode 100644 index 000000000..b19c0151a --- /dev/null +++ b/v0.20.3/evaluating_model_performance/index.html @@ -0,0 +1,153 @@ + +Evaluating Model Performance · MLJ

Evaluating Model Performance

MLJ allows quick evaluation of a supervised model's performance against a battery of selected losses or scores. For more on available performance measures, see Performance Measures.

In addition to hold-out and cross-validation, the user can specify an explicit list of train/test pairs of row indices for resampling, or define new resampling strategies.
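
As a rough sketch of the latter option (the strategy below is a toy example; it relies on the MLJBase extension point train_test_pairs, so consult the manual's section on custom resampling strategies before depending on it):

import MLJBase
+
+# a toy two-fold strategy: odd-indexed rows vs even-indexed rows, then swapped
+struct EvenOdd <: MLJBase.ResamplingStrategy end
+
+MLJBase.train_test_pairs(::EvenOdd, rows) =
+    [(rows[1:2:end], rows[2:2:end]), (rows[2:2:end], rows[1:2:end])]
+
+# then use it like any built-in strategy, e.g. evaluate(model, X, y, resampling=EvenOdd(), ...)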

For simultaneously evaluating multiple models, see Comparing models of different type and nested cross-validation.

For externally logging the outcomes of performance evaluation experiments, see Logging Workflows

Evaluating against a single measure

julia> using MLJ
julia> X = (a=rand(12), b=rand(12), c=rand(12));
julia> y = X.a + 2X.b + 0.05*rand(12);
julia> model = (@load RidgeRegressor pkg=MultivariateStats verbosity=0)()RidgeRegressor(
+  lambda = 1.0,
+  bias = true)
julia> cv=CV(nfolds=3)CV(
+  nfolds = 3,
+  shuffle = false,
+  rng = Random._GLOBAL_RNG())
julia> evaluate(model, X, y, resampling=cv, measure=l2, verbosity=0)PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────┬───────────┬─────────────┬─────────┬────────────────────────┐
+│ measure  │ operation │ measurement │ 1.96*SE │ per_fold               │
+├──────────┼───────────┼─────────────┼─────────┼────────────────────────┤
+│ LPLoss(  │ predict   │ 0.194       │ 0.232   │ [0.383, 0.0673, 0.131] │
+│   p = 2) │           │             │         │                        │
+└──────────┴───────────┴─────────────┴─────────┴────────────────────────┘

Alternatively, instead of applying evaluate to a model + data, one may call evaluate! on an existing machine wrapping the model in data:

julia> mach = machine(model, X, y)untrained Machine; caches model-specific representations of data
+  model: RidgeRegressor(lambda = 1.0, …)
+  args:
+    1:	Source @119 ⏎ Table{AbstractVector{Continuous}}
+    2:	Source @958 ⏎ AbstractVector{Continuous}
julia> evaluate!(mach, resampling=cv, measure=l2, verbosity=0)PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────┬───────────┬─────────────┬─────────┬────────────────────────┐
+│ measure  │ operation │ measurement │ 1.96*SE │ per_fold               │
+├──────────┼───────────┼─────────────┼─────────┼────────────────────────┤
+│ LPLoss(  │ predict   │ 0.194       │ 0.232   │ [0.383, 0.0673, 0.131] │
+│   p = 2) │           │             │         │                        │
+└──────────┴───────────┴─────────────┴─────────┴────────────────────────┘

(The latter call is a mutating call, as the learned parameters stored in the machine potentially change.)

Multiple measures

Multiple measures are specified as a vector:

julia> evaluate!(
+           mach,
+           resampling=cv,
+           measures=[l1, rms, rmslp1],
+       	verbosity=0,
+       )PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────────────────────────────────┬───────────┬─────────────┬─────────┬────
+│ measure                              │ operation │ measurement │ 1.96*SE │ p ⋯
+├──────────────────────────────────────┼───────────┼─────────────┼─────────┼────
+│ LPLoss(                              │ predict   │ 0.384       │ 0.267   │ [ ⋯
+│   p = 1)                             │           │             │         │   ⋯
+│ RootMeanSquaredError()               │ predict   │ 0.44        │ 0.257   │ [ ⋯
+│ RootMeanSquaredLogProportionalError( │ predict   │ 0.174       │ 0.0791  │ [ ⋯
+│   offset = 1)                        │           │             │         │   ⋯
+└──────────────────────────────────────┴───────────┴─────────────┴─────────┴────
+                                                                1 column omitted

Custom measures can also be provided.

Specifying weights

Per-observation weights can be passed to measures. If a measure does not support weights, the weights are ignored:

julia> holdout = Holdout(fraction_train=0.8)Holdout(
+  fraction_train = 0.8,
+  shuffle = false,
+  rng = Random._GLOBAL_RNG())
julia> weights = [1, 1, 2, 1, 1, 2, 3, 1, 1, 2, 3, 1];
julia> evaluate!(
+           mach,
+           resampling=CV(nfolds=3),
+           measure=[l2, rsquared],
+           weights=weights,
+       )
+┌ Warning: Sample weights ignored in evaluations of the following measures, as unsupported:
+│ RSquared()
+└ @ MLJBase ~/.julia/packages/MLJBase/eCnWm/src/resampling.jl:809
+PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌────────────┬───────────┬─────────────┬─────────┬───────────────────────┐
+│ measure    │ operation │ measurement │ 1.96*SE │ per_fold              │
+├────────────┼───────────┼─────────────┼─────────┼───────────────────────┤
+│ LPLoss(    │ predict   │ 0.278       │ 0.324   │ [0.546, 0.117, 0.17]  │
+│   p = 2)   │           │             │         │                       │
+│ RSquared() │ predict   │ 0.488       │ 0.153   │ [0.385, 0.473, 0.604] │
+└────────────┴───────────┴─────────────┴─────────┴───────────────────────┘

In classification problems, use class_weights=... to specify a class weight dictionary.
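
For example (a sketch only: the data, classifier, weights, and measure are illustrative, and the chosen measure must itself support class weights, as its doc-string will state):

Xc, yc = @load_iris  # any classification data set will do
+LogisticClassifier = @load LogisticClassifier pkg=MLJLinearModels verbosity=0
+class_weights = Dict(c => w for (c, w) in zip(levels(yc), [1.0, 2.0, 2.0]))
+evaluate(LogisticClassifier(), Xc, yc,
+         resampling=CV(nfolds=3),
+         measure=MulticlassFScore(),
+         class_weights=class_weights)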

MLJBase.evaluate!Function
evaluate!(mach; resampling=CV(), measure=nothing, options...)

Estimate the performance of a machine mach wrapping a supervised model in data, using the specified resampling strategy (defaulting to 6-fold cross-validation) and measure, which can be a single measure or vector. Returns a PerformanceEvaluation object.

Available resampling strategies are CV, Holdout, StratifiedCV and TimeSeriesCV. If resampling is not an instance of one of these, then a vector of tuples of the form (train_rows, test_rows) is expected. For example, setting

resampling = [((1:100), (101:200)),
+               ((101:200), (1:100))]

gives two-fold cross-validation using the first 200 rows of data.

Any measure conforming to the StatisticalMeasuresBase.jl API can be provided, assuming it can consume multiple observations.

Although evaluate! is mutating, mach.model and mach.args are not mutated.

Additional keyword options

  • rows - vector of observation indices from which both train and test folds are constructed (default is all observations)

  • operation/operations=nothing - One of predict, predict_mean, predict_mode, predict_median, or predict_joint, or a vector of these of the same length as measure/measures. Automatically inferred if left unspecified. For example, predict_mode will be used for a Multiclass target, if model is a probabilistic predictor but measure expects literal (point) target predictions. Operations actually applied can be inspected from the operation field of the object returned.

  • weights - per-sample Real weights for measures that support them (not to be confused with weights used in training, such as the w in mach = machine(model, X, y, w)).

  • class_weights - dictionary of Real per-class weights for use with measures that support these, in classification problems (not to be confused with weights used in training, such as the w in mach = machine(model, X, y, w)).

  • repeats::Int=1: set to a higher value for repeated (Monte Carlo) resampling. For example, if repeats = 10, then resampling = CV(nfolds=5, shuffle=true), generates a total of 50 (train, test) pairs for evaluation and subsequent aggregation.

  • acceleration=CPU1(): acceleration/parallelization option; can be any instance of CPU1, (single-threaded computation), CPUThreads (multi-threaded computation) or CPUProcesses (multi-process computation); default is default_resource(). These types are owned by ComputationalResources.jl.

  • force=false: set to true to force cold-restart of each training event

  • verbosity::Int=1 logging level; can be negative

  • check_measure=true: whether to screen measures for possible incompatibility with the model. Will not catch all incompatibilities.

  • per_observation=true: whether to calculate estimates for individual observations; if false the per_observation field of the returned object is populated with missings. Setting to false may reduce compute time and allocations.

  • logger - a logger object (see MLJBase.log_evaluation)

See also evaluate, PerformanceEvaluation

source
MLJBase.PerformanceEvaluation (Type)
PerformanceEvaluation

Type of object returned by evaluate (for models plus data) or evaluate! (for machines). Such objects encode estimates of the performance (generalization error) of a supervised model or outlier detection model.

When evaluate/evaluate! is called, a number of train/test pairs ("folds") of row indices are generated, according to the options provided, which are discussed in the evaluate! doc-string. Rows correspond to observations. The generated train/test pairs are recorded in the train_test_rows field of the PerformanceEvaluation struct, and the corresponding estimates, aggregated over all train/test pairs, are recorded in measurement, a vector with one entry for each measure (metric) recorded in measure.

When displayed, a PerformanceEvaluation object includes a value under the heading 1.96*SE, derived from the standard error of the per_fold entries. This value is suitable for constructing a formal 95% confidence interval for the given measurement. Such intervals should be interpreted with caution. See, for example, Bates et al. (2021).
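The quoted value can be reproduced from the per_fold entries. Here is a sketch, consistent with the RSquared() row of the table shown earlier (note the length(per_fold) - 1 under the square root):

using Statistics

per_fold = [0.385, 0.473, 0.604]                 # RSquared() per-fold values from above
se = std(per_fold) / sqrt(length(per_fold) - 1)
1.96 * se                                        # ≈ 0.153, the displayed 1.96*SE
mean(per_fold)                                   # ≈ 0.488, the aggregated measurement (Mean aggregation)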

Fields

These fields are part of the public API of the PerformanceEvaluation struct.

  • model: model used to create the performance evaluation. In the case of a tuning model, this is the best model found.

  • measure: vector of measures (metrics) used to evaluate performance

  • measurement: vector of measurements - one for each element of measure - aggregating the performance measurements over all train/test pairs (folds). The aggregation method applied for a given measure m is StatisticalMeasuresBase.external_aggregation_mode(m) (commonly Mean() or Sum())

  • operation (e.g., predict_mode): the operations applied for each measure to generate predictions to be evaluated. Possibilities are: predict, predict_mean, predict_mode, predict_median, or predict_joint.

  • per_fold: a vector of vectors of individual test fold evaluations (one vector per measure). Useful for obtaining a rough estimate of the variance of the performance estimate.

  • per_observation: a vector of vectors of vectors containing individual per-observation measurements: for an evaluation e, e.per_observation[m][f][i] is the measurement for the ith observation in the fth test fold, evaluated using the mth measure. Useful for some forms of hyper-parameter optimization. Note that the aggregated measurement for a measure measure is repeated across all observations in a fold if StatisticalMeasures.can_report_unaggregated(measure) == false. If e has been computed with the per_observation=false option, then e.per_observation is a vector of missings. (Typical access patterns are sketched after this list.)

  • fitted_params_per_fold: a vector containing fitted_params(mach) for each machine mach trained during resampling - one machine per train/test pair. Use this to extract the learned parameters for each individual training event.

  • report_per_fold: a vector containing report(mach) for each machine mach trained during resampling - one machine per train/test pair.

  • train_test_rows: a vector of tuples, each of the form (train, test), where train and test are vectors of row (observation) indices for training and evaluation respectively.

  • resampling: the resampling strategy used to generate the train/test pairs.

  • repeats: the number of times the resampling strategy was repeated.
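As a minimal sketch of how these fields are typically accessed (the machine mach and the measures l2 and rsquared are carried over from the examples above):

e = evaluate!(mach, resampling=CV(nfolds=3), measure=[l2, rsquared], verbosity=0)

e.measure                     # [LPLoss(p = 2), RSquared()]
e.measurement[1]              # aggregated l2 over all folds
e.per_fold[1]                 # l2 on each of the 3 test folds
e.per_observation[1][2][3]    # l2 for the 3rd observation in the 2nd test fold
e.fitted_params_per_fold[1]   # learned parameters from the 1st training event
e.train_test_rows[1]          # (train, test) row indices for the 1st fold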

source

User-specified train/test sets

Users can either provide an explicit list of train/test pairs of row indices for resampling, as in this example:

julia> fold1 = 1:6; fold2 = 7:12;
julia> evaluate!(
           mach,
           resampling = [(fold1, fold2), (fold2, fold1)],
           measures=[l1, l2],
           verbosity=0,
       )
PerformanceEvaluation object with these fields:
  model, measure, operation, measurement, per_fold,
  per_observation, fitted_params_per_fold,
  report_per_fold, train_test_rows, resampling, repeats
Extract:
┌──────────┬───────────┬─────────────┬─────────┬────────────────┐
│ measure  │ operation │ measurement │ 1.96*SE │ per_fold       │
├──────────┼───────────┼─────────────┼─────────┼────────────────┤
│ LPLoss(  │ predict   │ 0.401       │ 0.429   │ [0.246, 0.555] │
│   p = 1) │           │             │         │                │
│ LPLoss(  │ predict   │ 0.214       │ 0.35    │ [0.0875, 0.34] │
│   p = 2) │           │             │         │                │
└──────────┴───────────┴─────────────┴─────────┴────────────────┘

Or the user can define their own re-usable ResamplingStrategy objects; see Custom resampling strategies below.

Built-in resampling strategies

MLJBase.Holdout (Type)
holdout = Holdout(; fraction_train=0.7,
                    shuffle=nothing,
                    rng=nothing)

Holdout resampling strategy, for use in evaluate!, evaluate and in tuning.

train_test_pairs(holdout, rows)

Returns the pair [(train, test)], where train and test are vectors such that rows=vcat(train, test) and length(train)/length(rows) is approximately equal to fraction_train.

Pre-shuffling of rows is controlled by rng and shuffle. If rng is an integer, then the Holdout keyword constructor resets it to MersenneTwister(rng). Otherwise some AbstractRNG object is expected.

If rng is left unspecified, rng is reset to Random.GLOBAL_RNG, in which case rows are only pre-shuffled if shuffle=true is specified.
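A brief sketch, following the calling pattern used in the TimeSeriesCV example further below:

holdout = Holdout(fraction_train=0.8)      # no rng, no shuffle: rows kept in order
MLJBase.train_test_pairs(holdout, 1:10)    # one pair, roughly ([1, ..., 8], [9, 10])

# An integer rng pre-shuffles the rows before splitting:
MLJBase.train_test_pairs(Holdout(fraction_train=0.8, rng=1234), 1:10)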

source
MLJBase.CV (Type)
cv = CV(; nfolds=6,  shuffle=nothing, rng=nothing)

Cross-validation resampling strategy, for use in evaluate!, evaluate and tuning.

train_test_pairs(cv, rows)

Returns an nfolds-length iterator of (train, test) pairs of vectors (row indices), where each train and test is a sub-vector of rows. The test vectors are mutually exclusive and exhaust rows. Each train vector is the complement of the corresponding test vector. With no row pre-shuffling, the order of rows is preserved, in the sense that rows coincides precisely with the concatenation of the test vectors, in the order they are generated. The first r test vectors have length n + 1, where n, r = divrem(length(rows), nfolds), and the remaining test vectors have length n.
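For instance, with rows = 1:10 and nfolds = 3 we have n, r = divrem(10, 3) = (3, 1), so the first test fold gets 4 rows and the other two get 3 rows each:

MLJBase.train_test_pairs(CV(nfolds=3), 1:10)
# test folds: [1, 2, 3, 4], [5, 6, 7], [8, 9, 10] (no pre-shuffling);
# each train fold is the complement of its test fold within 1:10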

Pre-shuffling of rows is controlled by rng and shuffle. If rng is an integer, then the CV keyword constructor resets it to MersenneTwister(rng). Otherwise some AbstractRNG object is expected.

If rng is left unspecified, rng is reset to Random.GLOBAL_RNG, in which case rows are only pre-shuffled if shuffle=true is explicitly specified.

source
MLJBase.StratifiedCV (Type)
stratified_cv = StratifiedCV(; nfolds=6,
                               shuffle=false,
                               rng=Random.GLOBAL_RNG)

Stratified cross-validation resampling strategy, for use in evaluate!, evaluate and in tuning. Applies only to classification problems (OrderedFactor or Multiclass targets).

train_test_pairs(stratified_cv, rows, y)

Returns an nfolds-length iterator of (train, test) pairs of vectors (row indices) where each train and test is a sub-vector of rows. The test vectors are mutually exclusive and exhaust rows. Each train vector is the complement of the corresponding test vector.

Unlike regular cross-validation, the distribution of the levels of the target y corresponding to each train and test is constrained, as far as possible, to replicate that of y[rows] as a whole.

The stratified train_test_pairs algorithm is invariant to label renaming. For example, if you run replace!(y, 'a' => 'b', 'b' => 'a') and then re-run train_test_pairs, the returned (train, test) pairs will be the same.

Pre-shuffling of rows is controlled by rng and shuffle. If rng is an integer, then the StratifiedCV keyword constructor resets it to MersenneTwister(rng). Otherwise some AbstractRNG object is expected.

If rng is left unspecified, rng is reset to Random.GLOBAL_RNG, in which case rows are only pre-shuffled if shuffle=true is explicitly specified.
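A hedged sketch (the target y below is an illustrative two-level categorical vector; each test fold should then contain roughly equal numbers of "a"s and "b"s, mirroring y as a whole):

y = coerce(["a", "b", "a", "b", "a", "b", "a", "b"], Multiclass)
MLJBase.train_test_pairs(StratifiedCV(nfolds=2, shuffle=true, rng=123), 1:8, y)
# two (train, test) pairs; each test fold contains two "a"s and two "b"s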

source
MLJBase.TimeSeriesCV (Type)
tscv = TimeSeriesCV(; nfolds=4)

Cross-validation resampling strategy, for use in evaluate!, evaluate and tuning, when observations are chronological and not expected to be independent.

train_test_pairs(tscv, rows)

Returns an nfolds-length iterator of (train, test) pairs of vectors (row indices), where each train and test is a sub-vector of rows. The rows are partitioned sequentially into nfolds + 1 approximately equal length partitions, where the first partition is the first train set, and the second partition is the first test set. The second train set consists of the first two partitions, and the second test set consists of the third partition, and so on for each fold.

The first partition (which is the first train set) has length n + r, where n, r = divrem(length(rows), nfolds + 1), and the remaining partitions (all of the test folds) have length n.

Examples

julia> MLJBase.train_test_pairs(TimeSeriesCV(nfolds=3), 1:10)
3-element Vector{Tuple{UnitRange{Int64}, UnitRange{Int64}}}:
 (1:4, 5:6)
 (1:6, 7:8)
 (1:8, 9:10)

julia> model = (@load RidgeRegressor pkg=MultivariateStats verbosity=0)();

julia> data = @load_sunspots;

julia> X = (lag1 = data.sunspot_number[2:end-1],
            lag2 = data.sunspot_number[1:end-2]);

julia> y = data.sunspot_number[3:end];

julia> tscv = TimeSeriesCV(nfolds=3);

julia> evaluate(model, X, y, resampling=tscv, measure=rmse, verbosity=0)
┌───────────────────────────┬───────────────┬────────────────────┐
│ _.measure                 │ _.measurement │ _.per_fold         │
├───────────────────────────┼───────────────┼────────────────────┤
│ RootMeanSquaredError @753 │ 21.7          │ [25.4, 16.3, 22.4] │
└───────────────────────────┴───────────────┴────────────────────┘
_.per_observation = [missing]
_.fitted_params_per_fold = [ … ]
_.report_per_fold = [ … ]
_.train_test_rows = [ … ]
source

Custom resampling strategies

To define a new resampling strategy, make relevant parameters of your strategy the fields of a new type MyResamplingStrategy <: MLJ.ResamplingStrategy, and implement one of the following methods:

MLJ.train_test_pairs(my_strategy::MyResamplingStrategy, rows)
MLJ.train_test_pairs(my_strategy::MyResamplingStrategy, rows, y)
MLJ.train_test_pairs(my_strategy::MyResamplingStrategy, rows, X, y)

Each method takes a vector of indices rows and returns a vector [(t1, e1), (t2, e2), ... (tk, ek)] of train/test pairs of row indices selected from rows. Here X, y are the input and target data (ignored in simple strategies, such as Holdout and CV).

Here is the code for the Holdout strategy as an example:

struct Holdout <: ResamplingStrategy
    fraction_train::Float64
    shuffle::Bool
    rng::Union{Int,AbstractRNG}

    function Holdout(fraction_train, shuffle, rng)
        0 < fraction_train < 1 ||
            error("`fraction_train` must be between 0 and 1.")
        return new(fraction_train, shuffle, rng)
    end
end

# Keyword Constructor
function Holdout(; fraction_train::Float64=0.7, shuffle=nothing, rng=nothing)
    if rng isa Integer
        rng = MersenneTwister(rng)
    end
    if shuffle === nothing
        shuffle = ifelse(rng === nothing, false, true)
    end
    if rng === nothing
        rng = Random.GLOBAL_RNG
    end
    return Holdout(fraction_train, shuffle, rng)
end

function train_test_pairs(holdout::Holdout, rows)
    train, test = partition(rows, holdout.fraction_train,
                            shuffle=holdout.shuffle, rng=holdout.rng)
    return [(train, test),]
end
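As a further illustration of the same pattern, here is a minimal sketch of a hypothetical "Monte Carlo" strategy (not part of MLJ), in which every fold is an independent random split:

using MLJ

struct MonteCarlo <: MLJ.ResamplingStrategy
    nfolds::Int
    fraction_train::Float64
end

# keyword constructor with defaults, mirroring the Holdout example above
MonteCarlo(; nfolds=5, fraction_train=0.7) = MonteCarlo(nfolds, fraction_train)

function MLJ.train_test_pairs(strategy::MonteCarlo, rows)
    # each pair is a fresh random split of `rows`
    return [partition(rows, strategy.fraction_train, shuffle=true)
            for _ in 1:strategy.nfolds]
end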
diff --git a/v0.20.3/frequently_asked_questions/index.html b/v0.20.3/frequently_asked_questions/index.html
new file mode 100644
index 000000000..704f0ac6d
--- /dev/null
+++ b/v0.20.3/frequently_asked_questions/index.html
@@ -0,0 +1,2 @@

FAQ · MLJ

Frequently Asked Questions

Julia already has a great machine learning toolbox, ScikitLearn.jl. Why MLJ?

An alternative machine learning toolbox for Julia users is ScikitLearn.jl. Although initially intended as a Julia wrapper for the popular Python library scikit-learn, ML algorithms written in Julia can also implement the ScikitLearn.jl API. Its meta-algorithms (systematic tuning, pipelining, etc.) remain wrapped Python code, however.

While ScikitLearn.jl provides the Julia user with access to a mature and large library of machine learning models, the scikit-learn API on which it is modeled, dating back to 2007, is not likely to evolve significantly in the future. MLJ enjoys (or will enjoy) several features that should make it an attractive alternative in the longer term:

  • One language. ScikitLearn.jl wraps Python code, which in turn wraps C code for performance-critical routines. A Julia machine learning algorithm that implements the MLJ model interface is 100% Julia. Writing code in Julia is almost as fast as in Python, and well-written Julia code runs almost as fast as C. Additionally, a single-language design provides superior interoperability. For example, one can implement: (i) gradient-descent tuning of hyperparameters, using automatic differentiation libraries such as Flux.jl; and (ii) GPU performance boosts without major code refactoring, using CuArrays.jl.

  • Registry for model metadata. In ScikitLearn.jl the list of available models, as well as model metadata (whether a model handles categorical inputs, whether it can make probabilistic predictions, etc) must be gleaned from the documentation. In MLJ, this information is more structured and is accessible to MLJ via a searchable model registry (without the models needing to be loaded).

  • Flexible API for model composition. Pipelines in scikit-learn are more of an afterthought than an integral part of the original design. By contrast, MLJ's user-interaction API was predicated on the requirements of a flexible "learning network" API, one that allows models to be connected in essentially arbitrary ways (such as Wolpert model stacks). Networks can be built and tested in stages before being exported as first-class stand-alone models. Networks feature "smart" training (only necessary components are retrained after parameter changes) and will eventually be trainable using a DAG scheduler.

  • Clean probabilistic API. The scikit-learn API does not specify a universal standard for the form of probabilistic predictions. By fixing a probabilistic API along the lines of the skpro project, MLJ aims to improve support for Bayesian statistics and probabilistic graphical models.

  • Universal adoption of categorical data types. Python's scientific array library NumPy has no dedicated data type for representing categorical data (i.e., no type that tracks the pool of all possible classes). Generally, scikit-learn models deal with this by requiring data to be relabeled as integers. However, the naive user trains a model on relabeled categorical data only to discover that evaluation on a test set crashes their code because a categorical feature takes on a value not observed in training. MLJ mitigates such issues by insisting on the use of categorical data types, and by insisting that MLJ model implementations preserve the class pools. If, for example, a training target contains classes in the pool that do not appear in the training set, a probabilistic prediction will nevertheless predict a distribution whose support includes the missing class, but which is appropriately weighted with probability zero.

Finally, we note that a large number of ScikitLearn.jl models are now wrapped for use in MLJ.

diff --git a/v0.20.3/generating_synthetic_data.ipynb b/v0.20.3/generating_synthetic_data.ipynb
new file mode 100644
index 000000000..37b62510d
--- /dev/null
+++ b/v0.20.3/generating_synthetic_data.ipynb
@@ -0,0 +1,2716 @@

Generating Synthetic Data

MLJ has a set of functions that generate random data sets, closely resembling functions of the same name in scikit-learn (https://scikit-learn.org/stable/datasets/index.html#generated-datasets). They are great for testing machine learning models (e.g., testing user-defined composite models; see Composing Models).

using MLJ, VegaLite, DataFrames

Generating Gaussian blobs

make_blobs

X, y = make_blobs(100, 3; centers=2, cluster_std=[1.0, 3.0])
dfBlobs = DataFrame(X)
dfBlobs.y = y
first(dfBlobs, 3)

3×4 DataFrame
│ Row │ x1      │ x2      │ x3      │ y    │
│     │ Float64 │ Float64 │ Float64 │ Cat… │
├─────┼─────────┼─────────┼─────────┼──────┤
│ 1   │ 5.30601 │ 7.33548 │ 9.9446  │ 2    │
│ 2   │ 5.14757 │ 5.8813  │ 8.84096 │ 2    │
│ 3   │ 3.34118 │ 9.36617 │ 12.1529 │ 2    │

dfBlobs |> @vlplot(:point, x=:x1, y=:x2, color = :"y:n")

[Output: Vega-Lite scatter plot of x1 against x2, points coloured by the class label y.]

[The notebook continues with further scatter plots of the same blob data (embedded plot data truncated).]
1.671781045300742, + "y": 1 + }, + { + "x1": -7.695274267358119, + "x2": -0.6810815053343753, + "x3": 1.1778108940061394, + "y": 1 + } + ] + }, + "encoding": { + "color": { + "field": "y", + "type": "nominal" + }, + "x": { + "field": "x1", + "type": "quantitative" + }, + "y": { + "field": "x3", + "type": "quantitative" + } + }, + "mark": "point" + }, + "image/png": "iVBORw0KGgoAAAANSUhEUgAAARwAAAD3CAYAAADPAOsVAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nOzdeXxdZZ348c9zlnuTe7PvSZulSdt0oy10oS0pmyBaYFAExVFnwG3054wKvhRndxwchhnFGUfHUWFQgRFEFmkLRbbShdI93dO0SbPve3Jzt3PO8/vjhNvW7m3W5nm/Xrxozj33PM+9bb8951m+XyGllCiKoowCbaw7oCjK5KECjqIoo0YFHEVRRo0KOIqijBoVcBRFGTXGWHfgUkQsh12VzbR0BUiIM5lXnEVOmn+su6UoyhmIiTot3j8Y4X9fKac3EI4dE0Kwatl0Fs3MGcOeKYpyJhP2keq17VX0BsJMzUziI2UzWTFvKkjJa9uq6DshCCmKMn5M2Eeq2pZeAD5+w2wS4j0AdPeHOFTbQUN7H3P8mWPZPUVRTmNcB5zVq1czd+7cU45LKent6ydqS+rravGaOgC9Pd0MDg5S39BEnNM/2t1VlFFXXFw81l24IOM64Bw8eJDbb7/9tK/NOhbkaGM3e5uiLJudRXvvIG0DEr/fz5IFM0lLjL+ktqurq8flb+Z47ReM376N137BpfWturp6mHsz8sZ1wDmbW5aWUL+mnMN1nRyu64wdv3Z+wSUHG0VRRsaEDTjpSfF8+Y6rePdAA63dAfxxJlcUZ1Ganz7WXVOUCeUzn/kMBw4cYMeOHWiaxq233kpcXBzPP//8sLc1YQMOQJLfy4eWlox1NxRlQluxYgVPPfUU27dvp6SkhHXr1vHII4+MSFsTdlpcUZTh8YlPfAKPx8PatWtZt24dUkruvvvuEWlrQt/hKIpy6dLS0rj55ptZs2YNc+fOZeXKlRQWFo5IW+oOR1EUPvnJT1JeXs7q1au55557RqwdFXAUReGOO+7A5/MxODjIXXfdNWLtjFjAsW2bZcuWEQwGY8cee+wxSkpKYv9t27ZtpJpXFOUC+P1+pk6dysc+9jEyM0dulf6IjOH86le/4jvf+Q41NTWcuDd09+7dvPDCCyxYsGAkmlUU5SIEAgGmT59OS0sLv/71r0e0rRG5w/n4xz/Orl27yMk5edd2VVUV5eXlPPzww+zatWskmlYU5QJ5vV6+//3vs23bNpYuXTqibY1IwImPjyc1NRVNO/nyvb29NDc3U1xczD333MP69etHonlFUS6AYRh86lOfYsmSJSPf1oi3cIItW7bEfh0MBnnuuee4/vrrAdi0aRObN28+5T1juV9kvO5VGa/9gvHbt/HaLxjffRtuoxZwuru7uffee3nxxRfRNI2amhoKCgpir5eVlVFWVnbSex555JEx23Q3Xjf8jdd+wfjt23jtF6jNmyMmNTWV4uJiysrKmDJlCq2traxevXq0mlcUZRwY0YDT2Nh40s8//OEP6ejoIBKJkJeXN5JNK4pyERo7+jlwrJ3OPnc5S3pSPPOmZZGXkTAs1x/1rQ0ZGRmj3aSiKOdg2Q5r3j3C3uo2TEMjK8UtRlDT0sN7BxuZX5zFbStmYOiXNs+k9lIpisKaLUfYV93G9VcWsmzOFDyGm0UzYtm8d7CRd3bXgoCPlJWe8Rq/+93v2L9/P9/5znfOeI7a2qAok1xDex97q9xgc+38gliwAfAYOtfOL+C6hYXsrWqjob3vlPeHw2Guv/56PvOZz5y0s+B0VMBRlEnuYE0HHkNn+dypZzxn+dypeAydgzUdp7zm8Xh48cUXefjhh8/Zlgo4ijLJdfYFyUiJP+v4jGloZKT4YoPJJxJCkJqais/nO2dbKuAoigKIc54hpUSc+7SzUgFHUSa59KR42nsCRC3njOdELYeO3sFLLlCgAo6iTHJzizKJWg7vHmg44znv7q8najnMLbq01BVqWlxRJrkpmYksmJ7NhvJapCNZMW8qnqHikpGozbsHGti4p44F07OZkpl4xut88YtfPGdbKuAoisJty2cggA1763j3QANZqe7Cv7buAJbtsHB6Nrcun3HJ7aiAoygKuib4k2tmsmRWHgeOtdPeOwjAklm5zJuWRW76BN3aoCjK+JWbnjBsweV01KCxoiijRgUcRVFGjQo4iqKMGjWGoyhKjNNVhVO3FdnfBIBIzEMvWIZIG56MiSrgKIoCTpTo9sdxajeD7kFLzgfAbnsTu/JVtMJrMJd8DjTzkppRAUdRFKztj+PUvosx72NoMz+MMLwASCuMU/kq1v4XsADj6i+d8t5oNMp9993HwYMHCQaDfOtb3+K+++47bTsq4CjKJOd0HsWu3Ywx7y70OXec9JowvOhzPoIE7P3Po02/CS19+knnPPfcc1iWxa5du+js7GTOnDncfffdJCScOr2uBo0VZZJz6re6gaV01RnPMUpXIQwvTv3WU14rKirim9/8JgAJCQkIIYhGo6e/zvB0WVGUiUoOtEDSFNDPMj6jexBJU5D9Lae8tGLFCsAtW/P5z3+er371q6Smpp72MirgKMpkJ88nGw5I6SDOkBDn0Ucf5dlnn+UHP/jBKfXlTqQCjqJMciIxB7vtINgR0D2nP8mOIPua0DJnn/LS6tWr2bRpE5s2bcI0zz6LpcZwFGWS0/OXgR3Brlh7xnOsijVgR9Dzrz7ltZdeeok9e/awfPlyFi9ezOLFi2ltbT3tddQdjqKcLysEQjvzXcAEJdJL0ItWYh14ESkdjFm3ghHnvmiFsCvWYh/8PXrRSkR6ySnvf/zxx8+7LRVwFOUcnI4j2OVP4XRVAwItYwbGVX+OSCkY664NG2PxZ0EI7IMv4Rxeixha+Cd765F2FH3atRiLTr+25oLaueQrKMplTA60Et3wb2CFEGY80rFxOiqJvPMInlv+BRGXPNZdHB6agbHkC2jTbxra2uCW6dYyb0YrWIaWOm1YmlEBR5kQ5EAbdu0mCHYjEnPRpl2P8Jy7LMmlso++CVYIrfAajCWfR0hJdMt/4TTtxq5+B2POn4x4H0aTljpt2ILL6aiAo4x7TuMurPd+jLRPWEx2+BU8N/wdIjFnZNrsPobsrMapfw/sKHrRNQjN/euiF6zAadqN7KsfkbYvZyrgKOOatMJEt/8C7CjatOvQ0qdj121Bth3E2vE45g1/O9wtYm37BXbNRven3npksBv78Dq07CsAcLqOAiD8l1bBYDJSAUcZ12RPHUQG0NJKMJd8HgC98BrCL/
0FsvMo0o4izrZC9gLZlX/ArtmI8PjQCpbjtFdiH1qNVf4kMtTrLu/vqATNdKeTlQuiAo4yvtkR9/9m3PFjuoHQDGQ0BI519iX5F8hp2gmAsfRLaHlXAhCxo9hH/oDTuAPhz0R4Ei67WarRogLOZGJHsGs2InsbIC4ZvWA5IiF7rHt1ViK1CDQTp/UgduU6tPQZ2Mc2IKNBRHI+wry0SpB/TIZ63XZP+F70aStx+hrQ85egT78FkVY87O1OFirgTBJysIvoW99FDnbGjtkHX8Zc9iW0qUvHsGdnJzx+jAX3YO1+Eqv86ePHNQNz8aWvCzmlvZQCZF8TVsUazCs/jQz2YNdsQmgGWuFKtOy5w97mZKICziRh7folcrAztqpUdtdgV693B16z5iI8/rHu4hnpMz6IlpSHVfU2MtiJljQVvXQVIilv2Nsy5nyUSOMunJqNhIcGjgG0tJLYI5Zy8VTAmRQkTttBQGCu/GYsuDgDbci2g8iuKkTO/LHt4jmI7HmY2fNGvp2kPDw3/j3WvueQXUeRehz61MXoc++MTYsrF099g5OBY7v/aXosdSSAMH1IOD4wqwDuuJF57TfHuhuXJbVbfDLQDERqITgW1s4nkF3V2NVvIZvL3SCUduqGPEUZCeoOZ5Iwr/wzIm89hH1sA/axDbHj+ryPIeJPn51NUYabCjiThEgrxnPL97AOrUb2NSLiU9CLrkWbsmisu6ZMIirgTCIiMRdz6RfHuhvKJKbGcBRFGTUq4CiKMmrUI5WiXCorhOyqRtoWWloReJPGukfjlgo4inIJnMYdWDueQIb73AO66VawPEtRuclMPVIpykWS/c1E3/spMtyHlj4dkT0PbAtr7zM4zeVj3b1xSd3hKMpFchp2gB1Bm3ZdLFePfeQPWLufxKndjJa7cIx7OP6oOxxFuUhysAMALb04dkwbWrXtBNrHpE/jnQo4inKRRNJUAJy6rWCFwbGxa9xV3NpQmRXlZOqRSlEukla4AirW4LQdJPL7L4NmIKNB0D3oMz881t0bl9QdjqJcJOHx47n+24jseUjpIK1QbKf5SOTquRyoOxxFuQQiMRfPdQ+6ydylfbxErnJaKuAoyjBwK0cMXzL3y5V6pFIUZdSM6B3Os88+y5133olpupHftm2ee+45Ghsb+chHPkJJiUr8pIyioayHw3rJpnLsw2uR/c3gS8eYfhNaYRkIMaztXC5G5A6nqamJxx9/nPvuu49o9Hh51gcffJDVq1eTlpbGhz70IRobG0eieUU5Tjp4GzYSXnM/4d/dS3jN/diVr4F0LvnSdv1Wopt+gNNegQz1IruqiW77Ofahl4eh45enEbnDKS8v58iRI+j68X9NIpEITz31FDU1NcTFxVFfX88zzzzDN77xjZHogqIAYB16GV/1y+DzuXc3gx1Y5U9BdBB97kfP/0JS4nRVQaAd/JloaSXY5U8BYCz8FFrBcmT7IaytP8M6+CLa9JsRHt8IfaqJa0QCzqpVq1i1ahVPPvlk7FhtbS25ubnExbmj+DNmzGDDhg1nuoSiXDrHwj74MlJomGXfQMtbiNO8h+jmH2Idehl99m2gnXugVwZ7sLb8CKfjSOyYSC7ACXQg/BnoMz/kHstfhjj6JrK9Atlbh8icNWIfbaIatVkqxzn7LeymTZvYvHnzKcerq6tHqkvnNJZtn8147ReMr77pgRaSBnqRCbnUhJKguhpIJFEmYgSaaDmwFdt/7vUyCXt/gdlTieNNIZpUhNF7DL3pIMZAE1YUGisPutPh0iGprQ49OEhfazd2//l9F+PpOxtpoxZw8vLyaGhoQEqJEIK6urqTBo3LysooKys76T2PPPIIxcXFf3ypUVFdXT1mbZ/NeO0XjMO+RbIIH/IRCPUwbWoOwuNDRoNE9lrg85Eycz7iXLlrIgOEdzQgUjIxV/0A4fEjIwEiax/ACbdimuCveRo97yqc9gocMYjILiZ17tUgzj1Eeinf2UQMVKMWcBITE1m5ciUPP/wwN910E7/+9a9Zs2bNaDWvXIbkQCvWwd8ju2vAm4iefzV6yQ3A0AyRJwGRNQdRs4PoG/+AllmK034YIgNoWXPOHWxwSyQDCF9GrICg8PgR/gy05AJEXCqypw6rp859LT4FY9n/O69gMxmNaMD5yle+EpsSB3jsscd46KGH+PGPf8xPf/pTpk2bNpLNK+OBlNjH3sE+8hpyoA0tMRt95ocveepY9jcTff3vkVY4dsxqO4jTVR1LFQFgLv0L7I4W5EAr9kAr4Ba6M5b+xfk1lJiL0AxkXyNO20G0rDk4rQegrwnMeDy3/QC7eS+yvxnNn+lWwVCrjc9oRAPO3/zN35z0c1paGo8++uhINqmMM3bFGqx9v4397PTU42z7OUYkEBtsvRjWnt8grTBa/jL02X8CAy1YOx7DOfYOcvpNiNQiAIQvjb6rvkpaQgQZaEckZKFllhK7CzoHoZtopauwD71MdP3DoHtilUqNOR8BIx49/+qL/hyTjdraoIwcJ4p14EUQGubyr6DlLsRp3EX0vf/G2vcc+vSbT7sQTw524dS9ixzsRCTmok+79pS7BqfLHb8wr/qMm0M4JR+t9QB21Zs4XVXoQwHHJdCyZgOzL+pjGHPvBCMO58hryFCvW9Nrxi2jl0Y0MuDWE+uoBMOLlrsQbfpNo9P2MFMBRxkxsq8Z7AgCsCvWYu39LSKlEBGfggx2IwdaEElTTnqP07IX690fnfSoZFesxrzh7xAJ2bFjwohD0osMD8TGYmQkEHttWGk6xuzbYfbt7t2N7hne65+FjAwQee1vIdgVO+a0HkBr3Qd5d45aP4aLCjjKyPEmIXtqccIDaEgQGrK/Gdl2CJFefEp1A2lHiW79GVhhtIJlaBmlOPVbcdorsLY/hnnD38bO1XLmYR9tJbr5P9BLbkD2teDUbwXdRGSUjtxnGsVgA2AffBmCXWgZMzEW3IMMD2DtfAKneS+mdyZMsO1BKuAoI0YOdiDtMCARngRE7nzsynVIO4Kwoghv4slv6K2HcB8ipRBz2VdAStB07Io12K0HwAqhz7oNLf9q9Cs+gdNxFNlTi1X+f+77NQPjqnsR/oxR/6wjRXYdBUBf8ElE+nQEoPc3Y+35DUZf3dh27iKogKOMGNlVjZZShLQjyOggsu49hCcBacRBQtap5w89Rr2/JcA+vBZr77NuFj1p43QexdnyY4xoAL34Rjw3/RNOwzacrmOIuCS0KYsQibmj+hnPW7AH68CLyK4jSM1Ez7vy/MaAdK/7/+hg7ND7j45Sn3jpMFTAUUaM8PjdQc7CazCmXYsMtCMdG8qfRsSnnnp+SiFoJk5bBdbhV7F3P4UMtKMlT0VkzECffQfWzv/F3vssevENoOloBcvRCpaftR96uBunqxqRmIsw492D0UHs6vXuLu/4VPSilQh/5iV/ZqenHnvfs+42CMODnncV+owPEl3/MDLUGzvP6qp2p9cLPnHW64msOdC6n+j2xzFKP4yMDGBXrgMhsFJnXnJ/R5sKOMqIEZmzQPcgm3ZiG15EQhZOzUYQ4rQlV
ITHhzH/E1jlT2HteAzZftgNWslTMZd8AZE1B/vwK+44UH/LOe9m5EAr1rafk1RXTtTnc6e4Z92Gnr+MyNvfg/eL1wFOxRqMFV+9pNIucqAN6+1/du/IwA1qVW9hH16D1OPRc+ZhLPhTZHQQa/svcNorMOP3Qsn0M15TL/0wsnU/TttBrPKnh74ogV56K1ZC0UX3dayogKOMGOFLx1j8OXd9TO3xfXJaZinGFXef9j36zFsQSblYh1Zj99RDfDLmB/4BkVbiPnKF+9wFg388/vPHnCjWu//prvsxE9DSinC6a7APvIhz+FWwQmjZc9ELluN0VGIf20B068/w3v6fFz0wbFesRkaD6AXL0K+6153Ofvc/sY++Cb4M9Hl3IVIK3HGY6TdjlT+N0X/2cRihGZjXfxu7fhuy8whoJlreVWgZM4b2hk0sKuAoI0ovXIGeORO7cRcyMoCWVoyWs+Csq4y1nPl4cuYTiQ4ie+qwdz2JkzUb2bIPGQmgZZYiPAlnbdfpOILTU49ImkJvyb2kz5iF07Sb6MYfYLfsQcueh1n2DdBNtGnX4fQ2ILuqcXrq0NLPfMdxNnJoe4NWusq9M/P40Yquxa5eD1YQTnikev/xSp7XFL5wFxdeBgsMVcBRRp4vA33GBy/4beayrxDZ8O9uHpquKsBNWn4+2xJkfwsAWkZp7I5Fy7kCiQQr5OYg1k/44z/0F18MrSI+5/WtME7VGzhdNQjTh15wNcQNrQfqb4XUacf74U1COg7RnU+g99ZDZAD76FuAIJo6glP445AKOMq4JZLy8H7433CaduEMtCESc9DzrgTt3H9s3x/fcdoPQZJ7Z+A07UIIDXzpyMgg1s5fohddi9Nx2F0bpJuxLRFnI8P9RN/4R+QJ1TXt6rdi+W/c8adDyPAATsN2t9+p03A6j2Lvf/79HmLM+xh2XOGFfSkTnAo4yvimm2j5V19wLlwtYwYipRDZU0vKtn8hciwf2eemtDUWfRan6i13QLfqreNNzf8kmOfO0mftfdadPUsrQS/9EDLQgbX/BWTbYbQpV+E07z5+Xd2DcdWfoU+7DqdlL7LjMOhxiNz5aCmFE3Ic5lKogKNcnjQDT9nXiW5/HHFsG7K3AWF40WbfgTH7dmTJB7Aq1iD7m9B8GWjF16NlzzuvS8uOwwAYV/9F7E5KDrRjV7+FljMf44q7cDqOIAyvmx5jaAmAljMfcuaPyMedKFTAUS5fvgzM6x6kN2cvydmpaEm5sccxkZSHufSLF3ddKd3/n5jzRhv6tXQQyfnoqrb4aV1QwOnt7aWvr4/8fPVlKuOD07gDp+495NCWCL30VkR8ysnnmAloKcP3Z1bLmIk90Iq1/TH0Wbe6dzfHNoAQiMyJtxhvNJ310fgnP/kJ06ZN47rrruOJJ54gNzeXgoICysrK6OvrO9tbFWXEWft+S3TD991Vycc2YO3/HZF1DyKHEm2NFH3+JyA+Bae9gujGH2Dt/jXYETexWMrkGgS+UGcMOMeOHeNrX/saxcXF9Pf389nPfpYVK1bw0EMPsX//fpVISxlTsr8Za9eTyI4KQIIRj+xvQ7buJzpUvmWkiLhkPLf8K/rcj6LlLUQvvAaz7AGMBZ8c0XYvB2d8pDp06BAej4fXX3+dxsZGCgoK+NGPfsScOXOoqanh0KFDo9lPRTmJ07TbLcUSl4Ix80PgS8Oq3oCsexen6m0oG9l6Z8LjdxNzKRfkjHc4JSUlhEIhvv3tb+Pz+aiqqqK0tJRnnnmGNWvWqHEcZUzJ3npwbETyFIwVX8VY+GnMpV9wXwz1jG3nlDM6Y8ApLS3l0Ucf5ec//zkNDQ0UFxej6zrf/OY3SU1N5f777x/NfirKybxJbkKvgTasfb/Fqd2Mc+AFgFh1BWX8Oess1de//nU+9alPkZycHDv2zjvvUFhYSF3dxEv+o1w+tJwrEEl5bsDZ+yxC97i7yHUPWvENp32PdCwYaHNnkxKyVSmXMXDOb/zNN9/kuuuuo6KiAgAhBDfffDO/+tWvRrxzinImWtZc9Bk3I1KnQbAH2VmJ09+GcKLYrfux9jxzcl7khu1E1t5PZN2DRF79FpFXv4VsOziGn2ByOuc6nNLSUurq6li0aBGf/vSnefrppxFC8LWvfW00+qcopycE5rK/REspwD7wEvZgJ5ovBZILEAjsw2sh1I1x9ZfRBxqwKp8Ax0Ik5iLtKHKglcjGH6AXrUT21LrjQZmlGHPugHPsRFcu3jkDzpVXXsnrr7/OypUr+fnPf47P5+PNN9/k6qsn/lZ5ZYIzvOjz7saufRdNaJhXfwmt8BpkbwPRtx/Crn0Xfe6deFu2gWOhl64amrqWRN/7KfauX2H11iESctzrdR/DadyJefNDsTSnyvA65yPVxo0bWbx4MbZt8+CDD+Lz+bjxxht54YUXRqN/ioIM9bqzUqdJHSGjQWSgA2HGoxVeA4BInorIdGtQyd4GtMEOwB33cQmE0JCRAEL3YF7/15gf+Ee09Ok4A21E3v4ukdf+msjqvyK68QduKWFlWJzzDqexsZFrr72WX/ziF+Tn5/PAAw/w5S9/mb1793LnnWodggI4Nnb12yQcWk+0PhEtY6abINzwxk6R0eDxfMLnK9hFdNtjOK373J91E33WbRhzPhpL4CWMODB9SCuIHGh1B4MdK7YzHF8aTnwGRBpxmvcMbdCU2PVbAdAKV6BlzXHPnXcX1ktfgq5qSHfLr8hgOZHWA3hu+kc35/L5kBKnfit27WaIDCCSC9Bn335ZVZO4WOcMOKtWreKee+6J/ZyVlcXzzz9PS0vLiHZMmSCkJLrpUZyWvZiDgzhBH07rfuy6LXhu/i529XqsijUQ6kV4fGjFN7oL5s5RcUA6FtbmH7nJt0wfwpeO7GvEPvCim/Dq/TLBQqDnL8Oufovom/+EyJ6H7KlF9jcjkqYgkvOJ5C2D/gPYletwmna7Aam7FrCxm/ch196PSMhFmF4IdiMTsvHc+Pdo/iysgy9iV72Fte+3GCu+Dp1H3H1byQWIpLzT9t3a8xvsylePH+g8il23Gc/N/zx+q0qMknMGnKSkpNMez8nJGfbOKBOP07ANp2UvIj6VQNFdJBdMw9r7W2RPLdG3v4cz9DgizHhkZBC7Yg1EAhiLP3v2C3fX4nRVIRKy8dz8XTB9OC17iW74d+yjb5xUl1xf+KfIYBdOczmybovbXlIe5oqvIjQDy5+HueKvsHb9KrbPSphepAOy/RDOYAdwANnbADjoBcvQMtxNmPq8u7Cr3kI27ye67sHjSbeEQCtYgbnkCyeVK5Z9TdhH1oFuYi66DxJzsQ+/itOwDWv3U5jXfnNYvveJSqWnUC6J0zlUqK30ViLGDLScYgyhE13/r9jHNiBSCzHLHkDLXYjsqSXy1j9jV7+NccVdp1TePOm6/c2AuzP7/aRYWs4VCM1ADrQhHQvxfqoJw4u58hvInjr3USo+1S0ad0JmQC3vKjy5C5EDrUgrSPTN76LZFtLwIkw/TqgLsJFSnvToIwc73JpYPdUIHHdzZmIusmUPTu1mLH8mxryP
HT+/qwqkRMtfhla00m17yecIN253vyspz5rP+XKnAo5yaYb+UksrfPxPkx0BJwpOFBGfGiu9IlIK0dJn4LTux+mpR8uee8bLikT3DtrpqIyN/zgt+9xAk5B9UjCJvSelAJFScOa+Cs2dFm876E6Dl9yI54q7cJr3grSJ7vsdtB/Gqd+GteXH4M/Eqd3sFp7T49DSp2N+4B8AgeysIvLmd9zywicEnPcfFYUVOn7MCoGUbh7lSRxsQAUc5RJpmbOxK9bgVK4lLq0DW9a45VI0A+JSkKE+5GAXwpeGtMKxwdxzDaCK1CJEWjGyq5ro2gfAnxGriqBPKxuqT9WC8KWhFZZd0ICs8PjBjiJb92NnzEBLLUJkzkZUroO0IjDjY4PKAFpiLo7HHfyFoYCRUgBCDN0BHb9rEekzEbqJ3bDDLfiXlDuUMB3EeWYUvJypgKNcEi13PnrJjdhVbxFfvRarxX380bPnQFwKTt0WIq//PXrWbOzOKgh2I9JLEP5TS/2eSGgG5vK/wtrxOE7rfne2RzcRhSuxK19HnlDEjkMvY17zdTeF53lw+tvcwehoENl9DOnxI6REJGShF5ZhLvkCdtMuiA6ipRYhdS/Rt76L01KODLQjfBk4R193H52SC066axG+NPQrP4O161duhcz3jyfmYCz81AV8s5cnFXCUS2Ysug9tyiLC+94kMS0FkTETvYrgeWAAACAASURBVHAFMhLEigZwmvcen4ZOK8Zc/pfn9Wgh/G6KUII9yFAPJOYSfecRZLgPLXseWv7VyI7D2DWbsLb+DPO2/3AfW84mMoC983G0lHxkqA8QMNiNtEJgxGEs/hzEp6CX3Hi8H0h39qt1P5G1D7hlZ+yIO0M29yOnNKEX34CWPgO7/j0I9SPSCtEKV567b5OACjjKsNBy5jM4mIBRXBw7Jjw+zJXfxOmuQXZXu2M4acXEHkvOV3yKmzbUjiC7qhCGF6PsAfcvcPH1bhG77ho3P05ayVkv5XQcdYvpTV2KsfyvcBq2QU8t1r7fIVILEL6007xLYC7/S+y9v8Wp34K0QoikPIx5HztjaWCRPBUj+a7z/ogy0OGWtHEstIzSM065T3Qq4CgjJzJAtPz/cOq2gGNBXDLG3DtPunv4Y9IK41S8jF23FSIBRGohxry7EeklSDsKSNDMkwaNhRGHBLCj5+ySjAbcX8SlIDx+9OIb3OtWvY2MhkA6p91FLjx+jMX3weL73LubiywHfDr2kT9g7XnGHWh3W0OfdSvG/E8MWxvjhQo4ysiQ7n4lp2WvO5XtTYJQL9bOJxC6GZsy/uP3WFv/G6dx1/FDrQeIth/GvOHv3LGfhBw3vejuX6MXluF0VOC0H3bHd862EjjUi7XvOeyaTTjNe3C6jyGSp6IXLMc+/IpbbSGl5PxSVgxjsNH767Aqn3Z/XbAMdA923RbsijWI1CK3xO9lRAUcZUTI3jqclr0Qn4Lnpn+C+DSc2s1Et/4P1qHVeE4TcJyOwziNuxDxKRjX3I9IyMI+8AL2kdexDryAee03MRZ/lug7/4p99A3so2/E3qsv+NQZt04IaRHZ8O/urnDcgV0Z6CD62t9gZ84EIx40HWPhn47Ml3EWZscBkA7G7NvRr/i427+0EqydT+A07lABR1HOh9PbAICeNRfi3XERrXAFbH/MrbftWKeU7HW3G+Aumktzx4L0OR/FPvJ6bAOlljkLz80PYR1+JVbETi+5EfH+fqjTMNv3I3tqEclTMcu+AaaPyBv/iFOzARkeQC+8Bn32HbE2R5NuuY94IiE7diy2/SHUO+r9GWkq4CgjQvgzAdytDUPBxek65v46Pu209cFFnJtZ8sQyL7FfexOPn5c89YKK2OmBJgC0ghWx9TqesvuJDLQgkqZgXjN26XItXy707sOuettdp6N7YtPp571ZdAJRAUcZESJ1mpsCtK+RyLoHEYl5yHa30oc+7TTjN4DInAW6idO4k+imR91Hqrr3ANCmLLrovjieoS0U/U2xY7LPvQNjqAzvWAnnLEEM7MPpqiKy5uux48Ljxyj98Bj2bGSogKOMCKGbmCu+RvS9n7h7nAbaAIE27Tr0OSevXXF66nEq12HXbEAGu5F9DciuY4iUfNA9aLkLMeZ+9KL7Ek2bBW3vYNduRob6EHHJ7nQ4oE9dcsr5Tk8tss0NjlrWnLNvl7hUQ/l47H2/xW7ei5A2InOWO0MVf7op+olNBRxlxIikPDw3/zNOT62bniI5H+FLj70uHQtr28+wj/4B2Vnt7jfy+NHSStwpaqFhXv9ttKwz77k6H058BubSL7gDsS173+8d+vSb0P8o4bq15xl31gp5/LwRnqIW8akYS/9iUvxlnAyfURlLQkNLnXbal+yDv8epew8GexDxqYiUIkCC6UeLT3H3XUUCw9INLX8ZZtYcnLZDYIXRMmackpvGadzl5kLWPejTrgUpsWs2YFesQcsoRcs7/SI/5fypgKOMmfcfa0R6CdhRvLf9kOjW/8Fpr0BkzkT2NSL7ms5xlfMnvElnnWZ2WvYAYFzxcfSZt7jvSczGKv8/nObdKuAMA1WYRxk7wW4AN8cM4HQcccdtANle6Z4zNNs1KsL9AAj/8cc+4XNntWR4YPT6cRlTdzjK2EmaCp1HhnIfS6Lv/ifOQCuE+hApBWjJU857B/hwEKlF0LAdq+IVzKQpAEPjOaClnf6xULkwKuAoY8acdyeRd/4Np/UAcqAd2d/sbjGIS0JLn46x9IuIE9bfjDSt5CZE9Xpk5xEir34rdlwkZKGVfOC8r+N0VceK7ImsOWOyoHC8UgFHGTMiex7mygew9j3nZuNLzkfLmO4Wo0ufcdqsfiPaH48Pz43/gHXgeZzWA27e4ux56HPvPO+KE1b509hHXnOTcoGbwmLGLSoXzhAVcJQxpeUuxJO78LRbHcZEfIqbE+ciOE27sCvXIQxvrL65U/02duU6tKzZaHlXDWdPJ6Rx8DusKIyPYHOJnGZ3lkufd3dslsv2Z2LtfhKnqVwFHNQslaIMn6E1Q8J3fLuEGNo6ISNqlgtUwFGU4TM0vW9XvILsbcDpqceuWAu4e8sU9UiljJTooLtT3PC6icYnQT5ffcYHcWo2uhsxX/vr2HGRkI0x4+Yx7Nn4oQKOMuzsI3/A2vdbsMKAm/DKWPy5UV1TMxaE4cW88e+w97+AMzQtrmXPRZ/7UTDixrh348OoBpzHHnuMhx9+OPbzb37zG5YuXTqaXVBGmNOyF6v8KQC0zFL3TqenHmvLjzE/+L1YnpzLlfAmYSy6d6y7MW6NasDZvXs3L7zwAgsWLBjNZpVRZB/bCFJiLPxUrP539L3/xqnbgtOwHb101Rj3UBlLozpoXFVVRXl5OQ8//DC7du069xuUiSfQDgzVBB+iZc4CQAbaxqRLyvgxqgGnt7eX5uZmiouLueeee1i/fv1oNq+MgvfrKdl1W9zVtnYUu2G7+1rilLHs2vCyIzgN27ErXxvKsSPP+RYFhJRyTL6pX/7yl2zfvp2f/OQnAGzatInNmzefct7dd9892l1TLoE+0EDi7h8
jpI1jJiCkhbBCOJ4k+hd/A8fwjXUXL5k+0ETCwV+ihbpjx6yEKQSu+DyOmTCqfSkunlj7tEZtDKe7u5t7772XF198EU3TqKmpoaDgeOrGsrIyysrKTnrPI488MmZfaHV19bj8zRyv/QK3b4Xzr8XJTMba/eRQAnQNbcqV6Ff+OelDqSfGol+x7yw6iH3sHWRfi1sHfNrKC0vlKR0i6/4HqYUR+Vegpc/Aad6D7G8mpf1NzLILS8h+Kb+f1dXVF/W+sTRqASc1NZXi4mLKysqYMmUKra2trF69erSaV0aRlrsAT+4CCPchhYHwjI+7GtnfTOSthyDcFztmHXoZ85qvnfeUvexrQvY3IxKy3eJ8mgFz7iCy+qvuo5UTBe3yX3N0sUZ1luqHP/whHR0dRCIR8vIuz9rJygm8SRdaRXxEWdt+DuE+tOx5aPlLkO2HsWvfJbr1Z3hv/4/zCxRDwUr4M47vZvckgMcPwR5kZDBW7kY51agv/MvIyBjtJhXFrVneVQ2GF6PsAYRuQvGNOL2NyJ5aZE894nzy1iTnAwLZUYnTvActcxZ2zUZksAcRn6KCzTmolcbKpCCcKEgHNNOtdR7oQIZ63F8D0o6c192Y8Caiz7oVu2IN0Y3fP+k1ff49I9L3y4kKOMrk4ElAJOYie+oI//bTSCsI4QCyvxktMRvtAmpPGfPuQsSnYle9iRzsRCRNwZh1G9ppalwpJ1MBR5k0jIWfIfzSFyA8AJqBMLwASGFgHfkDxh8V6DsjTUef8UH0GR8cwd5enlTAUSYNEZeIljwVJxzAyFsEidmI9GLsPc/gVL0F5xtwlIumAs7lLDKA03YIaYXR0opjq4AnKznQCkY8xrQbMJd/JXbcOfAiMtgNdgR0zxj28PKnAs5lymnYjrXjcWSscqVAn34jxpV/DmI8TVaPnvcrbcrOSmS4H+FNRLbuR1phtwSxCjYjTgWcy5AMtGNt+5l7Z5N9BSIuCadxB/bRNxFJ+ejTz7/kyeVEpOSjZc7Caa8gsvYBhD8D2dsIoMZjRolKMXoZcpr3usGmYDnmdd/CuPpLGFd/yX1tqLzu5CQwln0FbcoisELI3gbQDfQ5H0Ev/bBbOSLY5U6fKyNC3eFchuTQpkIteWrsmBiqJCmD3ad9z2Qh4lMwr/m6m9Q82INIzEZaUawd/4tdsxEcG4w4jNm3o8++HcbVWumJTwWccaC6uYfN++rp6A2S5PewZFYeV0zLRFzkWItIdjdJ2rWb0QqvAW8SduU697ULWG9yvpyuauzDr5LYUEG0uQR9+k1o2fOGvZ3hJDwJ7pYEwN7xU+yG7e4iwPg0CHZj7XsOicCYffsY9/TyogLOGDtU28Fz6w/Ffu4fDNPYfpie/hDXLri44KBPWYSTOg2n+xiRNV8HobkldHXTrWo5jJyWvUQ3fB+QGIODOI09OI07MRbfh15847C2NRJkoN3N1+NJwLzpnxAJWcjW/UTeeQSnch2ogDOs1BjOGHttm5ti4AOLpnH/3Uu589pZ6JrGhr11hCLWxV1UMzBXfgN92nXuv+RCR8uchXHdX8fufoZLdMcTgEQvvZX+RfdjLPgkCIG1+yl3mnmck31NAGgZ0xEJWYBbghhvEjLcd9LOcuXSqTucMTQYitI3GMYfZ3LNPHe8Zd60THZVNlPT0ktL5wBFuSkXd/G4ZIwln4cln3cHQcXw/9siQ70w2IGIT8FYcA9WdTV6cRlOw3aczqM4PXVo6dOHvd3h9H5Sd9lTh4wGEWa8G4TC/W49cW/iGPfw8qICzjAKhKI0dfQDMCUjEV/c8XQHtiM5cKyd9p4A/ngPc4syiPeaaJogHLUJRSziPO5GwoGge2cQ7x2mvCojEGwAN52DEEgr4uaBAZASGR10mx3aOjCeiaRctIwZOB1HiK77FiIpH6ezEpBoRStRg8bDSwWcYbKzsoXXt1cRsdwpVY+hc/PiaSwqzWUwFOWX6/bS0TuIIyWaEKzfXcOd180mOSGOitpOHnpyE3MKM7AldPQGSU2MIzNlfCSuOhPh8aGlFuN0VRHd+CheYypW6yvIviZEfCoicSKsbHanyq2tP8VpP4wM9oAQaIXXqN3fI0AFnGFQ39bHK+8dBaBkiltLurqph1e2VpGZ6mf7oSaONXczGLLweAxs28GyHP7lqXexbItg2KKpo4/K+i6S/B7y0hLxx3l48g/7yE5LYMXcKST5x+fdgrH0i0Te/h5O6358g9uwfT53WvnqL4Omj3X3zovwpWNe/7fI/mZksBMtacqFpR1VzpsKOMPgYE0HUkpWzi/ghivd+tLvlNfyzp46DhxrZ9eRFlq6AhTnpeI1daKWzYGadsJRhySfh9L8DLoGBukfCBOJ2nT0BTEM9zGotrWX8qMtfP7WK8lIjo+12dkX5K1dNTS092MaGqX56aycn0+cZ3R/S0VSHt5V/459bCPhY/tIKpyFVliGiL/IsaexIgQiKW/S7zcbaSrgDIO+QbekbU6aP3YsJ91d49EzEKS1awCA25ZPZ8msPHZWtrD3WDtIybxpmTz4pyuobu7hZy/v4mhjN4ausWrZdDJTfGw71MSh2g7Wbavi0zfPi7X3+Nryk2axthxooK61l3s/PB9dG+XJR9OHPvMWBo0Z6OM0wbsyPqiAMwwyk30cAvZWtTF9insrXn601X0tJSE27NjRF6KzL0htay/CkSAE/nh3w2BBVhK24yClQ0Kch8Wl7kbDnDQ/FbUdNLb3xyofrd9dSyhiMbswg1uWFjMYivL8OxU0dvRTUdfJ3KLLu5yuMnGpgDMMFpXmsrOymcP1nfzbb94F3Fkpn9fk6jl5bNlfT2VDF5v31rP9UCPBsIWDuwiqtSvAkYYuWrsD9PSH0IQgwXfC7JbtBiZNE25hOaBl6I5p5fx8knxeknxerpyZwxs7jtHcOaACjjJuqYAzDBJ9Hj7zwStYt62K+jZ3WrwoJ4kPLS0hMd7Dktl5hCIWvYMh4kydqCVJiPcQidrUtvbwL09tJmo7SMBjGmhC4/Ud1WSl+NlZ2YKUkmk5ybGtDr6h6fLeQIScobHN3oGQ+1qcKlGijF8q4AyTrFQ/f3bLfGzHvQvRtePrN25aNI3eQJjDdZ2xY4U5yST5PeyvbqNnIIJH15lXnMnC6dls3t/AlgONsXOT/V5uWVoS+3lmfjrVzT28vLmSBSVZDIaj7KtuR9c0pueljsKnVZSLowLOMDsx0LzP0DU+ccMc2roDtHQFSPJ7yc9K4sCxdjp6gyT6IuiaoLs/RPnRVjyGTtS2mZKZxOyCdK6amYPHOD7FvGR2Hg3tfew/1s57BxtjbdyypJisVP8p7SvKeKECzjBzHMmRhi46+4Ik+b3MzE/HMzTFnZXqJzPVz76qNv7vjf1s2t+AcCSZqX76BsM0dwxg2RJfnIFp6HT3h1g+O++kYAPu2tc7r53F0tl51Lf14zF1SvJSSEmIG4NPrCjnTwWcYdQ/GOHpN/bT1h2IHUv0ebnnxjnkpicQCEX4+ZpyGtv6aOsJ0D8YwTR0hCboC0QIRi1MXcdrGsR5DVq7Az
y2dg/f/3+nz9A3NTOJqZlJo/XxFOWSqYAzjNZsOUJbd4CMZB8z89NoaOujrq2P59+p4LqFBTz9+n7qO/qQtiQYsdA0d3zGdhwCoYi77QF38Dg3I4HegTAt3QPsONyCx9BI8HliY0SKMhGpgDNMLNuhuqkHQ9f47KoF7kZMKfnvl3bS3BXg+XcO0dodIBJxMHWB7UhsW9IzEMKyHWzH3YPlSIj3GITCFhHLImpJ/u+NfbHHJV1GuTcliykZE2sXswz3uzl5VCncSU0FnGESidrYjoM/zoxtLxBCkJwQx9HGbqSURG0br6lhGhqBsIWNJBSxh64gAIkEbOnQ2NFPKGwjNHcafH5xFs2d/VQ1BPjd+kN8+SOLY2ND45nTVYW14wlkTy3gVk4wrvpztOy5Y9wzZSyM/z+xE4QvziTJ7yUQivLewUYGw1F2H2nhvQMNNHX00dQZIByxcRyH3oBbx1ofCjJw/DFJ16C9N0h3fwhbSjyGxp3XzqIvGKGtN0hHX4jdR1tZX147Vh/1vMnBLqIbvo/sqXUTgXmTkP3NRDc9Gkt8pUwuKuAMow8ucfcR/WF7Nd9/5j3+5/e76AmESUvyoQmwpSQYdXCGVgyfOBojBJi6RmaKD+lIbNtBF4Ks1AQ276+nprmHONPAa+oEghGe+sM+Hluzm6aOgTH4pOfHrtkEkQH0qUsw/+S/8N7xY/SSG8GOYFe9OdbdU8aACjjDaE5hBp/8wFwKspLQhMCWDkXZyXzvCzcwrzgTQxPYtuPuUBAgEO7qYSGQUiKR7jEk/jgPBdlJeAyd6qYepmQksuKKqQyGLTRNYNkOOw438/ja3VTUdYz1Rz8t2e/exYgpixCaAQj0/Kvd1/rc9UOytx6nbgtO64HjSbyUy5Yawxkmrd0B9lW1MRCKMDM/nYXTc3j53UrmFmWSlhjHX925hL9//B1qWnqQUoIETRMIoSElOI6DI6G5awANid/nxdR1mjsHcKRECNiwtw7LkcwtyiQUjuK4l2HtlqOU5qdfdJWHkRJL39l+GAqvAcBpG0oY78vAevdHbgLz989PyMZc/peI1KLR7qoySlTAGQZ7jrayessRwhGbzr5BIpaDoQt8XpOmjn4ilk2y38tnVy3ge09uxrZthKbh3twIIhEbTQikcBcOmh6dUDhKm21j2TbhqEPPQBgkZKfEserqEl7ZWsW03BT6AmE6+4J09YdIT4o/d2dHkV6wHPvwK9jVb+N0HgXdRHZVu4m5wv3YTbsQngREzhXI3gZkbz3RLf+F55aHVdndy5QKOJdoMBzl1a1VhKM2/cEwvjgTKxBhYDBKT3+YqOXwkxd3ELFsDtV2YugCiYZlOeiahq6DYQjivSaOI4emyCVBy8J2dOyhKfPBUIR4j0EwbPPq1ioAFk7P5s1dNYC7tWG8EUl5mMv/kujO/0X21rvHPAnoCz+Fte9ZAMwb/haRPBWkQ/T1f8DpqcXpqkbLnDWWXVdGiAo4F6GlK8DOymb6AmEcxw06UcsmLTGeedMyuWpmDj95cSet3QOEozb1bX209QwigMLsZHoCIVo6A4AkK8VP0LKJRCxCUQsJGJqG0A3SkuPo7Q8TsSIEwxZR28Gja7R0B1gxdyqHajvpC4RJT4oneZymINXyrsST/Sj01iOl45ap0XTY/guEZiCS3YqgCA2Sp0JPLXKw8+wXVSYsFXAu0P5j7by48bA7DoNbYaGlawCf14PPa7Jq2XQMXSMlwUsoYuGPN3EkZCTHs+rqEgxd56VNlcR5DXQhCEYtQhEb09DQBDhSYBgaHl1DOpKkhDhCEQvHgXiPiWlARrKPqia3ZK/XNLijbOZYfiXnJHQT0opPqn8gEnOQfU3YVW+hF9+IHGjBadkHgDbMtbOU8UMFnAsQtRzWbjmKlJJrrsinICuJHRXNvLK1is6+QeK9Bkcbuqio7yRq2wSCUboHQuCAg+SZtw6Rm5ZAXyCEdCTogqxUP/2BCOGoTYI/jt6BEIPBCIOaIEG65Wb6A2FA4o83SPLqXLuggEAwQlpSPFfOyME/AXPgGHPvJLrlx1g7f4lV/jTY7gyVNmXxiJQjVsYHFXAugPuIZDElM5EPXFUEwPQpqew+2sKxll7aewb54e+2kez30t0fJGJJvKZGTqqfI409hCNhIpZNSmIcwbDl1qMKW+RnJbGvug3HkehCw5I20obo0CC0poEvzkNuagLBUJAZU9MoyJrYmza1/KsxpYN14EXkQAvC40crWokx72Nj3TVlBKmAcwHE0EOBOOHhQEr3EUcAUdthIBglbLnBxJFw6/JS0pO8VPxup7vwL2wBITfDn5S0dgeI9+jkpCUwNTORwbBFdVM3Hb2DhC0LLSgwDY2ctASaOgfoD4T43fpDLC7NZcW8qeNysPh8aQXL8RQsd+9u9Il3l6ZcOBVwLkBWqp84j0FDex9/2F5NYU4K+6paCUUsSqak8tlVC3nvQCOr3z1ChxPENHQOHGunZyCEaWpgSbymTlpiHH6vyUAwTLypM6cok0WleeiauwCwdzDMCxsOU9/ah6ELcjISONbUgy0l8V6dgWCE9eW1dA+EuOOa8T1+c15UsJk0VMC5AKahcdvyGbywsYL3DjbGsu15DI3bV8zE0DWONHbhjzdJT/KhDy3E6+4PYTsSf5yJL87kEzfOYWdFC9sON5GW7GdGfhovbaw4oWqnxkfLZtHaPUD5kRZqW3sJWzaZKfFMy4inaEomlQ2d7DnaStkV+eNu/Y2inIkKOBdoTlEG6clXsvtIK72BEJnJPpbMyiXR527crG/rwx9n8rlbF7J2yxEcCf3BiLuexnbQNXhzZw0tXQEEMLswg9e2HQMk8V6DnoEwTR39/OatA3z1zsVcv7CA+3/yhrvPKtlPW+8gA9F2+gZC+H1emjsHVMBRJgwVcC5CdqqfDy09teDbwGAEcLP8LZqZg9fU2bi3jsp6nbAjyUrzUTIllc6eIJomKMpJJtnvoa7VwTR0gmELr6nj9Ri09wT4xZrd/OnN87AsG10TfHBJMZFAN5VtUepae4lYDgnxakWuMnGogDOM0pPjMXSN1u4Ah2o7mFWQjiagrXuQxo5+UhPi6O4LEbFsLMshHLF5e3ctg+EIKf54pmYlcvPiYpo7Azz71n66+oK8U15HQrwHQ9dYt7WaeMPCEV5CUZs4j0FeRsJYf2xFOW8q4AwjQ9e4dkEBb+2q4bn1h2LHhYB7P7yAFL+HI/VdbNpfT0ZKPF7TIGzZ9A9GCQSj/EnZTErz0zlQ005Gio9gyKKnP4QvzqS9O0BHXxDbtkEIHEcyqzD9lATrijKeqYAzzK65Ip94r8HWg030DIRISYhj+dwpLJyejRCCqqYeEn1e5pdkc+uy6XT0BnjwZ2/SPxjlN2/sZ/O+eizbwWsYeBJ0knwejjR04khJUU4KVjRIS3cEy5bjbne4opyLCjjDTACLZuayaGbuaV9/v6LD0ll5mIZGbnoiH1w8nZc2HaZnIIQv3kNuqh9HQjhqkZuRQGKDl7Bl4zE0rIhgSmYibd2DbhlgRZlAVMAZZQk+d5C3v
TcQG39JS4ojK9WPaWgk+zwEIxYABdnJFOUkczDFR2F2CtlpPlpa27liZhFr3zvCYEglrFImFhVwRkl/MEL5kRZ6+kN09QX5/aZKqhq7CUdtjjZ2MzUzkQ8sKvr/7d19UFT1Hsfx97IuD4sLsiGKgIArRIBPV0O4CCrJvT6kMVrdHHK0ZmqynIaZSrOm+iPvqE2ZfzjToI42xihzqUlNm+sdDCWY5LKWiVACiyALAS6syNPytOf+ge4FxOf2nEV/r788Pz2czx5nvpzz298Dlqud2CWYPMGH6VMCqG8eWEL0SmsHf3syHIMezG3dAEzUi102hdFFFBwZXG66xsHcC/T0DuzQYLdLmOqvYuvpw9N9DO4aNUvnTmW6IeCmc4P8dUSH+VNWbWHPsV/o7OxEqx2YSrFgVpi8H0QQHpAoOE4mSRLf5v9OT28/0WHjmRrkx8XaZkqqruAxxo1VC54gdILvbWd8r0iM5DEfLedNjfR0dxES4MPCWWGETRR7PAmjiyg4TmZtt9Ha0c24sZ6smh+FCphhCKDe0k5bZzfB/rohxab6j6sU/VaPtd2G31hP5kYHETbRl4WzQlk4K5SqqiqmTLl50KEgjAai4NwlSZI4b2riV1Mj7V29TNR7kzgthAl+t+9HufFN0hi1m2OOuUqlQqMeOOobtHXv+aomDv940XHcZO3gYm0zK5OjiA0f/+d+IEFQgKwFp7+/n5ycHOrq6khLS8NgMMh5+QeSe7aan0rNjmNLaye/1VhYt2TGbbfd9ff1wstDg6W1k7xfapg8wYfjP1VwvqoJ9zFqzlc28NdpIWjUbvz7+lrFT/0ljIhgPeXmFn74uZrvz1QSEz4eMepGGO1kXUxl06ZNfPfdd+j1ehYvXkxdXZ2cl79vLW1dnCmrY4x6YBfM19NmMzsykH67xH+Kq257rkqlYln8VFQqFad/vcw/vyrgv7//QXdPP2O93Mk/X8uBEyU0xWO2kwAAB6NJREFUXe3E1tOHv6+WxGkhBPh5M29aCP6+Wmw9fTS3dsr0aQXBeWR7wunp6SErK4vq6mo8PT2pra0lOzubt956S64I9+2P5nYkSSJqsr/j1ebvcVP4pbJhYN8ou4Sb262fP6LD/PHxns6/8spQu7kR7O/NP1Ki8dN58f2ZCuotbZTXDiwc3t3b7/h5drtE9/Vvttw1YgqDMPrJVnBqamoIDAzE09MTgIiICPLz8+W6/APx0Azcpo5BA+26uvuQJNBo1NzNDIPg8T5M8tfR3tXL6qdiiAjWA5AYG8KRwnJartnw9/XC0tpF9g+lTA3SU1lnpa2zG39fLT5a19yVQRDuhWwFx2633/bvCwoKKCwsHNKm1WrZvn27M2PdlX7JjUudj2EyuVF8pgBPt35a+zyx2cfgO8bGJ5+cvqufU9ftS2ufJ/tqf2acxgZAc6+Wxh4d1poSxmlsVHf5YjKpOXH9HDV2JK+rbL+Y66RPJ4xW0dHRo+4bS9kKzqRJkzCbzUjSwKTDy5cvD+k0njdvHvPmzZMrzh1t376dTZs2OY5/v2zh2x8v0nt9Vb6xDKxlvG7JdLQed7dE5rnKRo4WlqP10BAfE0Rfn52i3+oZ29vneOrp6u7lfFUT1jYbep0X0w0BeLr//79peC5X4qrZXDUXuHY2Z5Ct4Oh0OpKSkti6dSuLFi3iwIEDHDt2TK7LP7Coyf68nqajrPoK7V09TNCPJSbMH7Xb3fe7zzAEUFVv5cKlK/xwfcdMgCejAh2vWF4eGuY+EfRnxxcElyDr1+J79+5ly5Yt7Nq1iy+++ILw8HA5L//AfL09SIgJvu/zVSoVK5OjmGGYQHVjK24qFVMCxxEqRgwLjwhZC45er2fHjh1yXtIlGYL8MAT5KR1DEGQ3ejc1crLExESlI4zIVXOB62Zz1Vzg2tmcQSXd2CRbEATBycQTjiAIshGTN4eRJImDBw+Snp7uaGttbeXQoUPY7XZWr16Nn59y/S/Nzc188803juO4uDhmzpypWB4Ai8VCdnY27u7upKen4+3tGguD1dXVcfz4ccdxcnIyUVFRCiaCa9eucerUKVasWOFoq6+vJycnB51OR3p6Oh4eD+8gT/GEM0hZWRkffPABn332maNNkiSWLl1KY2Mj7e3tpKam3nEQozMVFxdz5MgRrFYrVqsVm82mWBaA3t5eUlJSsNlsmM1mnnnmGUXzDJaXl0dubq7jXnV3dyuap6ioiIyMDA4dOuRoa29vZ+HChajVai5cuMCaNWsUTOh84glnkNOnT9Pc3DykzWg0otFo+OijjwA4efIkRUVFJCQkKBGRiooK1q5dy/PPP6/I9YfLzc0lMjKSt99+G4DZs2dTUVFBRESEwsmgsrKS1157jZSUFKWjAHDixAk6OjpwGzR26/DhwyxYsIANGzYAYDAYuHLlCuPHP5zLkYgnnEHWr1/Pxo0bh7SVlpYSGxvrOI6IiKCyslLuaA4mk4lt27YxZ84c4uPjKS8vVywLuN79GayyspJ3332XOXPmMH/+fMxm851PcqIPP/yQl156aUhbWVkZMTExjmODwcClS5fkjiabR7rgFBYWkpqaSmpqKvX19SP+m+GvT0rsBbVu3TpSU1PZuXMnTz/9NF9//TVGo5GMjAwyMjJkzzOYK9yfW3nhhRc4fPgwRqOR5557jvfee0/pSDfp7+93qXvmbI90wZk5cyaZmZlkZmbe8hE2NDR0yG+cmpoa2RcO+/jjj8nMzOTFF1/E3d3dMUI7Li6OhoYGWbMMFxo6sOzpDUrcn1vR6XRMmjQJcI17NZLh96+2tnbUjcC/F490H463t/cdZ9smJyfz6quvcvToUQCqq6uZO3euHPEcQkJCHH/+9NNPMRqNLF++nK1bt7Jq1SpZswy3ZMkS3nnnHU6ePElTUxOSJBEZGalophs2b97MK6+8QmJiItu2bVP8Xo0kLS2NhIQEnn32WUpKSggPD39o+29ADPy7SUtLC1lZWbz55puOtrKyMnbs2IFGo2Hjxo2K/gayWq18/vnn1NTUkJSUxMsvvzykE1IJRqORXbt24ePjw+bNmwkMHHnXUbk1NDSwc+dOGhsbWbRo0ZChDkopLy/n7NmzrF692tGWn5/Pnj17CAgI4P3330ev1yuY0LlEwREEQTaPdB+OIAjyEgVHEATZiIIjCIJsRMERBEE2ouAIgiAbUXCE+1JaWkpJSYnSMYRRRhQc4Z6Vl5ezePFitmzZonQUYZQRBUe4SVtbGzk5ORQUFABw7tw5cnJysFgsLF++nMcff1zxiZDC6PRIT20QRqbT6di/fz+nTp3CaDSybNkywsLCWLlyJbt376arq4v4+HilYwqjkCg4woh2795NbGwsSUlJ2Gw29u/fj1qtdkxbUKvFXufCvROvVMKIgoODWb9+PS0tLaxZs8ZlJmQKo5soOMKIGhsb2bt3L1qtlqysLEwmk9KRhIeAKDjCiN544w06OjrIy8tDrVazdu1aRddyFh4Oog9HuInZbGbq1Kl8+eWXxMXFsW/fPoqLizGZTI61ijds2PBQLxQlOIdYnkIQBNmI
VypBEGQjCo4gCLIRBUcQBNmIgiMIgmxEwREEQTb/A7HlIVy+FsfnAAAAAElFTkSuQmCC", + "image/svg+xml": [ + "\n", + "\n", + "-10-50510x1051015x312y\n" + ], + "text/plain": [ + "@vlplot(\n", + " mark=\"point\",\n", + " encoding={\n", + " x={\n", + " field=\"x1\"\n", + " },\n", + " y={\n", + " field=\"x3\"\n", + " },\n", + " color={\n", + " field=\"y\",\n", + " type=\"nominal\"\n", + " }\n", + " },\n", + " data={\n", + " values=...\n", + " }\n", + ")" + ] + }, + "execution_count": 35, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dfBlobs |> @vlplot(:point, x=:x1, y=:x3, color = :\"y:n\") " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Generating concentric circles\n", + "\n", + "```@docs\n", + "make_circles\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "

<p>3 rows × 3 columns</p>
<table>
<thead><tr><th></th><th>x1</th><th>x2</th><th>y</th></tr>
<tr><th></th><th>Float64</th><th>Float64</th><th>Cat…</th></tr></thead>
<tbody>
<tr><th>1</th><td>-0.342997</td><td>-0.00629956</td><td>0</td></tr>
<tr><th>2</th><td>1.02085</td><td>0.0437288</td><td>1</td></tr>
<tr><th>3</th><td>-0.21868</td><td>0.958061</td><td>1</td></tr>
</tbody></table>
" + ], + "text/latex": [ + "\\begin{tabular}{r|ccc}\n", + "\t& x1 & x2 & y\\\\\n", + "\t\\hline\n", + "\t& Float64 & Float64 & Cat…\\\\\n", + "\t\\hline\n", + "\t1 & -0.342997 & -0.00629956 & 0 \\\\\n", + "\t2 & 1.02085 & 0.0437288 & 1 \\\\\n", + "\t3 & -0.21868 & 0.958061 & 1 \\\\\n", + "\\end{tabular}\n" + ], + "text/plain": [ + "3×3 DataFrame\n", + "│ Row │ x1 │ x2 │ y │\n", + "│ │ \u001b[90mFloat64\u001b[39m │ \u001b[90mFloat64\u001b[39m │ \u001b[90mCat…\u001b[39m │\n", + "├─────┼───────────┼─────────────┼──────┤\n", + "│ 1 │ -0.342997 │ -0.00629956 │ 0 │\n", + "│ 2 │ 1.02085 │ 0.0437288 │ 1 │\n", + "│ 3 │ -0.21868 │ 0.958061 │ 1 │" + ] + }, + "execution_count": 36, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X, y = make_circles(100; noise=0.05, factor=0.3)\n", + "dfCircles = DataFrame(X)\n", + "dfCircles.y = y\n", + "first(dfCircles, 3)" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.vegalite.v4+json": { + "data": { + "values": [ + { + "x1": -0.3429973289986776, + "x2": -0.006299563562939361, + "y": 0 + }, + { + "x1": 1.020851097085077, + "x2": 0.043728796294943945, + "y": 1 + }, + { + "x1": -0.21867985215237407, + "x2": 0.9580608508854831, + "y": 1 + }, + { + "x1": 0.9625640311412708, + "x2": 0.21084553537124323, + "y": 1 + }, + { + "x1": -0.2374389587127787, + "x2": 0.9803255854713364, + "y": 1 + }, + { + "x1": -0.24595828848918444, + "x2": 0.9111329686909976, + "y": 1 + }, + { + "x1": -0.6153573100129112, + "x2": -0.7174777312043913, + "y": 1 + }, + { + "x1": 0.29983681490799097, + "x2": -0.16393266640813678, + "y": 0 + }, + { + "x1": -0.6632463716368422, + "x2": -0.8001109689169298, + "y": 1 + }, + { + "x1": -1.0100656696395762, + "x2": 0.09435336737910159, + "y": 1 + }, + { + "x1": -0.08936277940202794, + "x2": 0.31082908269227055, + "y": 0 + }, + { + "x1": 0.0014616299487905382, + "x2": 0.2912518595769784, + "y": 0 + }, + { + "x1": 0.32794851838748895, + "x2": 0.00982268552859051, + "y": 0 + }, + { + "x1": 0.17351832880979648, + "x2": 0.30214573327773553, + "y": 0 + }, + { + "x1": 0.22716768591438416, + "x2": -0.11774922836321489, + "y": 0 + }, + { + "x1": -0.679973863644328, + "x2": 0.7221401632284198, + "y": 1 + }, + { + "x1": 0.9520012124705264, + "x2": 0.2534620436014916, + "y": 1 + }, + { + "x1": -0.29464854561206044, + "x2": -0.15685435652734164, + "y": 0 + }, + { + "x1": 0.2700982666499536, + "x2": 0.00888445334085276, + "y": 0 + }, + { + "x1": 0.18592993013624848, + "x2": 0.09115204832655024, + "y": 0 + }, + { + "x1": -0.2935080433318936, + "x2": 0.1412751463717719, + "y": 0 + }, + { + "x1": 0.7978191852896912, + "x2": 0.6162813194908285, + "y": 1 + }, + { + "x1": -0.22254688768213504, + "x2": 0.1605507152220613, + "y": 0 + }, + { + "x1": -0.15881180400189826, + "x2": -0.9822445412304502, + "y": 1 + }, + { + "x1": -0.8563020605700551, + "x2": -0.43527403789979235, + "y": 1 + }, + { + "x1": 0.32978189481436077, + "x2": -0.9789875458306052, + "y": 1 + }, + { + "x1": -0.7706670237593738, + "x2": 0.606821111398671, + "y": 1 + }, + { + "x1": -0.4659477289814586, + "x2": 0.9435043814406232, + "y": 1 + }, + { + "x1": -0.11003684705292904, + "x2": -0.9689640456114402, + "y": 1 + }, + { + "x1": -0.23107183282863042, + "x2": 0.059953932947462346, + "y": 0 + }, + { + "x1": 0.6330557548781504, + "x2": -0.8626540953154966, + "y": 1 + }, + { + "x1": 0.42427802614154514, + "x2": 0.9201088595955302, + "y": 1 + }, + { + "x1": -0.5147670225012002, + "x2": -0.842291406477388, 
+ "y": 1 + }, + { + "x1": -0.01993920866796471, + "x2": 0.28221604812323003, + "y": 0 + }, + { + "x1": 0.709895106742534, + "x2": -0.6157240147949496, + "y": 1 + }, + { + "x1": 0.2065567105053752, + "x2": -0.08766340966874338, + "y": 0 + }, + { + "x1": 0.44803825736454, + "x2": 0.9554995429808866, + "y": 1 + }, + { + "x1": -0.25080081682843997, + "x2": -0.9449314723920158, + "y": 1 + }, + { + "x1": -0.15046593100431926, + "x2": -0.29935140872031396, + "y": 0 + }, + { + "x1": 0.9103006206565706, + "x2": -0.47309282712544315, + "y": 1 + }, + { + "x1": 0.24875943962936292, + "x2": 0.9395316192709823, + "y": 1 + }, + { + "x1": 0.5565044880529034, + "x2": 0.8547081318747849, + "y": 1 + }, + { + "x1": 0.16269085289063295, + "x2": 0.27477718218329916, + "y": 0 + }, + { + "x1": -0.9629756398871605, + "x2": 0.08201822136517878, + "y": 1 + }, + { + "x1": -0.10563104657650116, + "x2": -0.23703709130305953, + "y": 0 + }, + { + "x1": 0.8754787545857909, + "x2": -0.5021627706867883, + "y": 1 + }, + { + "x1": 0.9750663880678216, + "x2": -0.08368111134366801, + "y": 1 + }, + { + "x1": 1.0182378700635522, + "x2": 0.36229863353152647, + "y": 1 + }, + { + "x1": -0.315687363784698, + "x2": 0.20279779525392258, + "y": 0 + }, + { + "x1": -0.2831956070540199, + "x2": -0.1592405085003608, + "y": 0 + }, + { + "x1": 0.16026997020032946, + "x2": 0.31023859757986355, + "y": 0 + }, + { + "x1": 0.6072258206079085, + "x2": -0.7912818372492608, + "y": 1 + }, + { + "x1": -0.20535694466674845, + "x2": -0.25656850846296153, + "y": 0 + }, + { + "x1": -0.9641499344391675, + "x2": -0.14049208516067613, + "y": 1 + }, + { + "x1": -0.6112661531754814, + "x2": -0.8715321600810764, + "y": 1 + }, + { + "x1": -0.08293498344071384, + "x2": -0.4129736360639079, + "y": 0 + }, + { + "x1": 1.0385999790988483, + "x2": -0.14017000537719834, + "y": 1 + }, + { + "x1": -1.0076641324920224, + "x2": 0.027335332349792345, + "y": 1 + }, + { + "x1": 0.29817424822287086, + "x2": 0.04286355625553197, + "y": 0 + }, + { + "x1": 0.013231245537377022, + "x2": 0.20147488297588945, + "y": 0 + }, + { + "x1": 0.3668421122289275, + "x2": -0.04557981331553901, + "y": 0 + }, + { + "x1": -0.37401001184436083, + "x2": 0.9203530401307064, + "y": 1 + }, + { + "x1": -0.9267344270799769, + "x2": 0.1793091020021044, + "y": 1 + }, + { + "x1": 0.026954417409516207, + "x2": 0.33310043765475605, + "y": 0 + }, + { + "x1": -0.18268204747176656, + "x2": 0.23856662483059662, + "y": 0 + }, + { + "x1": -0.1894173950825171, + "x2": -0.14474247785644406, + "y": 0 + }, + { + "x1": -0.5425176533238742, + "x2": -0.805946500350001, + "y": 1 + }, + { + "x1": -0.9650769024788493, + "x2": -0.02232220001476625, + "y": 1 + }, + { + "x1": 0.7112706527628312, + "x2": -0.7211592660571973, + "y": 1 + }, + { + "x1": -0.28491815276811977, + "x2": -0.07054596159092547, + "y": 0 + }, + { + "x1": 0.2756079094350364, + "x2": 0.20235536126443116, + "y": 0 + }, + { + "x1": 0.2240703193325335, + "x2": 0.9519657879984283, + "y": 1 + }, + { + "x1": 0.9519454027127411, + "x2": -0.45357197969470997, + "y": 1 + }, + { + "x1": -0.3333244695089689, + "x2": -0.2077895015728271, + "y": 0 + }, + { + "x1": 0.14365013577653737, + "x2": 0.05716772625951774, + "y": 0 + }, + { + "x1": -0.2650023883873516, + "x2": 0.022792590129607198, + "y": 0 + }, + { + "x1": 0.20635596343573775, + "x2": -0.013425305228230607, + "y": 0 + }, + { + "x1": 0.055374276623968224, + "x2": 0.24613958883085488, + "y": 0 + }, + { + "x1": -0.25047895138585263, + "x2": -0.11926289997230519, + "y": 0 + }, + { + "x1": -0.30135495282946084, + 
"x2": 0.18616473647811618, + "y": 0 + }, + { + "x1": 0.14532109194406506, + "x2": -1.040895677739072, + "y": 1 + }, + { + "x1": 0.7081062660338171, + "x2": 0.6874607762309212, + "y": 1 + }, + { + "x1": 0.8390174045057971, + "x2": 0.4620999203625478, + "y": 1 + }, + { + "x1": -0.22652408835373633, + "x2": 0.16854637046802926, + "y": 0 + }, + { + "x1": 0.19740525848569687, + "x2": 0.13072625650144903, + "y": 0 + }, + { + "x1": 0.26732684137604423, + "x2": -0.13719514797253204, + "y": 0 + }, + { + "x1": 0.3116056484318315, + "x2": 0.06498420835031596, + "y": 0 + }, + { + "x1": -0.24960279415133863, + "x2": 0.05663660712253064, + "y": 0 + }, + { + "x1": -0.23582753668725392, + "x2": 0.18412066290791876, + "y": 0 + }, + { + "x1": -0.23403324429917724, + "x2": 0.8403381480195693, + "y": 1 + }, + { + "x1": 0.8784733597241043, + "x2": 0.6607865891460041, + "y": 1 + }, + { + "x1": -0.2675150513505535, + "x2": -0.22733219426709703, + "y": 0 + }, + { + "x1": 0.13987364887016432, + "x2": -0.32668574207030815, + "y": 0 + }, + { + "x1": 0.3460921745748985, + "x2": -1.0154928903409635, + "y": 1 + }, + { + "x1": 0.16520144729846079, + "x2": 1.034839139002008, + "y": 1 + }, + { + "x1": 0.24883005682836432, + "x2": 0.08659666670503414, + "y": 0 + }, + { + "x1": 0.020733754198625186, + "x2": -0.273474754725191, + "y": 0 + }, + { + "x1": -0.06869982763841025, + "x2": 0.2517322619869118, + "y": 0 + }, + { + "x1": -1.017808261449678, + "x2": 0.3029801396536927, + "y": 1 + }, + { + "x1": -0.014847635093359055, + "x2": 0.22143466631904055, + "y": 0 + } + ] + }, + "encoding": { + "color": { + "field": "y", + "type": "nominal" + }, + "x": { + "field": "x1", + "type": "quantitative" + }, + "y": { + "field": "x2", + "type": "quantitative" + } + }, + "mark": "circle" + }, + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAASIAAADyCAYAAADp98gtAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nO3dd3Rc1bnw/+85Z4o06r1ZlixbrpJ7w5boNgbHFFNNSS4hBUhg5YY3F7LuXTfJCwQICSFvEhIILcAl5OdLIKETHDAu4IarXGRLVre6NGqjKeec3x9jjzSekSXbmhlpZn/W8lqePfuceTQzerTPPrtIuq7rCIIghJAc6gAEQRBEIhIEIeREIhIEIeREIhIEIeQMoQ7gXLzzzjvMmjUr1GEIwphVUFAQ6hDOyrhMRAcPHmTNmjXndY7Kysox/2GNhxhBxDmaRiPGysrKUYomeMSlmSAIIScSkSAIft1xxx3Mnz8fTdMAWL16Nddff31AXkskIkEQ/Fq2bBm7d+9mx44dtLa28uGHH3LBBRcE5LVEIhIEwa+bb74Zk8nEe++9x4cffoiu69x4440Bea1x2VktCELgJScns2LFCt59911mzZpFaWkpeXl5AXkt0SISBGFI69atY8+ePbzzzjvccsstAXsdkYgEQRjSNddcg8Vioa+vjxtuuCFgryMuzQRBGFJMTAwTJkxg3rx5pKWlBex1RCISBMGv3t5epkyZQmNjI6+88kpAX0skIiGodEcPrq9eQW86AOY4DLOuQ85dGuqwBD/MZjO//OUvmTp1KosWLQroa4lEJASV66tX0Gq+cD+wd+Pc9kdMcdlIiRNDG5jgw2AwcNtttwXltYLWWX3HHXfQ39/v97nq6mrWrl3L0qVLeeaZZ4IVkhACWvPB0wpU3zIh4gQ8EX3wwQesWrWK1157zTNU/HTf/OY3ueWWW/jkk0947bXX2Lp1a6DDEkJEMsX6Fprjgx+IMKYEPBEVFxfz6KOPDtnj3tnZSXl5OTfddBOxsbGsW7eO9957L9BhRSRdc6HVf4VatQm9ry0kMRhmXQvSwNdOSpyIMiGw/Q/C2BfwPqIJEyYwYcIEjEaj3+cbGhrIysryPM7MzGTv3r2BDivi6KoT56ePoLe7l4iQFCOG5T9Azpwd1Djk3KWY4iegneyslicsBsX/d0OIHCHvrLZYLFitVs/jvr4+YmMHmu+bN29my5YtPseNxpor42HdltGK0dS0i5i6AwBIzj5kZw+uj/4v1gt+hm6IPu/zn3WchqmgAtW1A2WaC+Tz/0oq3bUoDiuu2Fw0c8L5xRkCYy1GXdfZV9nMziMnaGrvBSAjOYaF07KYXZCOJEnn/RohT0Q5OTl0d3djtVpJSEhgz549LFiwwPN8SUkJJSUlXsc88cQTo7J41PhYJGsSWvNhcPUjpU5FMsWc07lc9jJUiwW9rw291/3Lb1R7iCl/HvMVj4LRcp5xnvt7qXefwLX9ObT2CjDHY5hzK0resnM6l3PbH9Gq3X+4JMWIYcnd7lbXKMQZDGNtYTQdeGvTEQ4cb/Eqr2/ppr6lm4r6Dq67cDr+UpGmadx///1s2LABi8XCs88+y8KFC/2+TsimePzwhz/kzTffxGg0ct9997FmzRp+9KMfsXHjxoCteTLuaC4cGx7G+dnPcW5+Cuf7/wet/dy+ZHLadJAk9N7mgUJTLPS1otbtGKWAz41r+3NobcfA5UBv+Arn3+/B8dZ3UKs2n9V5tNajniQE7stR1+7XRjvciLLnaKNPEhrswPEW9hxt9PvcO++8Q1VVFQcPHuS5557j3nvvHfI8QUtEW7duJTp64BLgoYceYuXKlQD8+Mc/5he/+AUXXnghmzZtwmI597/O4cTUtAu97ajnse7owbV/vVcdva8drWoTWv1O96XNEOTUQgzzv+G+9JFkiEpASpjgftJpC0j8I6K5PMlVt9ag93ehaypaVwOuHX9C66wd5gSDTlW/C72vHVx2T5lu6wTNOephR4rdR5vOuc7HH3/MTTfdhCRJLFiwgPr6enp6evzWDdql2enLB6Snp3s9XrpUjK49nWzv8C3sHfjrpLUcxvn5k6A6AJAScjFd9hMwmH2PUx3gsqOkF6FZa5Fi0kCSQDYEvcPai2xAiopHt3WiO3oHyhUj6Bp6azkk5p75HLqO88vfox3fiG6tRUdHSshFsqQgJ+WDLDrDz1VzR+8I6vhPLi0tLaSmpnoeJyUl0dHR4dUHfIqYfT+GqfG+a79IKVMGni/7mycJAejWWtRq3459dB3H5l/j2vsXdJcNNCdabzNy6lSMJf+OFJ8dkPhHyjDnViTZ4EkYUnQSkjHW8//h6O2VaLXbwBCFnDABJBm9qwEpIRfDku8GNPbwN4Id6YforI6Pj6elZeAPZ3d3N4mJiX7rikQ0hjmTZ6DMuNrzCypnzMI4Z53ned3W6XOMbmv3Letrdc/tApBkpPgc5NgMd0duKFtDJ8kTL8C46gmMS+9BTpuBlJQPEsjpM5Gz5gx7vG4bNCbKkoKcORs5cxbGy36CFJ8TuMAjQHqSnwGop0lL9N+VsnTpUr74wj2dp6KigpSUFOLi4vzWDfldM+HMDMU3osy6DklzgSHK6zk5bRpq94nTyqb7nmSovqMz9CkFmxSbjmHhXSjTv4bechiiEpGzZoPf+zGnHZtU4E7Wg/qC5OQpSP4uUYWzMq8wg7qWrjPWmV+Y6bf81ltv5YUXXmDNmjVUVFTw29/+dshziEQ0DrgvW3w/KmX2LeiOPrSG3UjGaJQZa5AzinyPj8tESsxD76weKEvMQ4rz/wUKJSk2Ayk24+yOiUnFsOS7qHtfR+/rQE6ZjGHxdwIUYWSZW5hJdZOVfRXNfp+fXZDO3CESkcViYcuWLZSXl5OdnU18/NBTeUQiGsckUwzGZffhvo4/U8tBwnjhj1DL3kK31iIl5KLMum6YY8YXJXcJSu6SEQ2K1JoP4tr3V+htQUqZgmHe15FiUs94TKSSgGuWT6UgO4mvjpyg8eSAxszkGOZPy6J4UtoZv0WyLDN9up9W+mlEIgoLI7h8iUpAKVqLeuhddGsd6pH3McxY4x5LFE6GG5lt78K15Wn0k0MW9IbduOzdGC/7SRCCG58kSWJ2QTqzC9KHr3yORCKKGDquTb8aGBDZtB+t9QimS38y5F2PcKS1VXiSkKes3V0mGc9/qotwbsRdswihdzf6jMrW2yq8R1pHArPvXRtJMYmO7RATLaJIIfn/myPJSpADCS05ZTJy5my0xn2eMmXGNUO+PwKg62jVm3FVbEDvrAHcy7cYJl+GnFcyKi1qkYgihBSTjpw2Ha3lsKdMTp8J0SkhjCoUJIylD7jn1/W2IKVORU6dGuqgxjAd57Zn0Gq+9C5tq8DZVoHcuA/j0ns5Uz/l//7v/3LgwAF++tOfDllHJKJIIUkYSn6IevQjsNa5FyQrXBlR/UMekuy+wyYMS63c6JOEBtNqvkRNn4VScLHPc3a7nSuuuIJt27Zx//33n/F1RCKKIJIxGsPMa0MdhjCOaMc3jqiOv0RkMpl46623+POf/8yJEyd8nh9MXBgLgjAk3Tr86gfaEHUkSSIpKWlEq2mIRCQIwpBGMOWV0RgYKxKRIAhDkhOGWYIFkBLOf2Kx6CMKEK
3lCOqB/0Xva0NOm4YyZx2S2DZHGGfkgovdq2eegVJwyXm/jkhEgWDrxLX5V54RvGpvC7q9G2Pp/wlxYIJwdpRJF6I3H/K/zhUg5y1HmXThGc/xne8MPwFZJKIAUFuP+Ewj0JvKRm2XCkEIHgnD4u8iZxajVvzLs4KDlJiHMvlS5InLGI0+IvFbEQCyv502DGaQImsUc9jQNVx7X0c7/jkAcn4phrm3Rc5obElCzluOnLc8YC8RIe9kcElp05GTJ3uVKdNWR+bgwTCgHvsEtfwjdKcN3WlDPfox6tGPQx1WWBEtokCQDRgu+U/0mq3ova3uxJQxK9RRCedo8LSYwWXK1FXDH9t6FPXI++DoQc6ei1x4hXuhO8GLeEcCRFKMSJMuCnUYwiiQon0XfJei/C8CP5jefQLnxsdAdS9hq7UcxuDsQym6cdRjHO/EpZkgDMMw7SoYPPTCHI8y7aphj9NO7PEkoVPUmm2jHV5YEC0iQRiOJRXTlb9wJxZAzprrf+tvV7/3BgeSn18vReyx5o9IROdBd9rQO6rAFIs83CaAwrgmmWJQhrhrpDXuw7njBbC1I8WkYVj0beT0GSgTFuEq+xs4BjYgHI3Bf+FIJKJzpLUdw7X51+h291Yr8oTFGJd9n3BakF4Ynu6y4/ry9+Docz/ubcH1xW8xrfl/EJ2IeeXDuI5+MtBZnbMwxBGPTSIRnSPXntc9SQhAq9uOVr9LfNEijN7dgH4yCXnK7N3oPU3uzR0tqRjm3BKi6MYP0Vl9jvSeJp8yrevMa64I4Ue2pODTCpZkiBp+q2xhgEhE50hOzvdTVhD8QITQMsdjmHXdoMGqEobiG5FMw6/BIwwQl2bnyDDv67hsv0HrrAVZwTB9tRi0GKGUWdch5y5xb16ZOBEpLivUIY07IhGdIyk2A+PKn6P3tbtv5YrtaCKaFJ+NFJ8d6jDGLZGIzpNkSQ51CIIw7gUlEXV3d9Pe3k5eXp7f5+12O319A3ce4uLiMBhEjhSESBHwzuq3336bRYsWcc8993DZZZdht9t96jz99NMsXbqUFStWsGLFCvbv3x/osARBGEMC3ux44IEH2LBhA/n5+dx11128+eab3HrrrV51jh07xttvv82MGTMCHY4gCGNQQFtEdXV1mEwm8vPzASgtLWX79u0+9SoqKnjttde4/fbbef311wMZkiAIY1BAW0RWq5X4+IFZyzExMfT09PjUy83NZenSpdx8883cfffdGAwGbrrpJgA2b97Mli2+6+VWVlaed3yjcY5AGw8xgojTi66h9DaCYkSNTjvrw8fLezmaApqIUlJSqKur8zxub28nIyPDp96f//xnz/+/973v8emnn3oSUUlJCSUlJV71n3jiCQoKzm/wYGVl5XmfI9DGQ4wg4hxMt3Xi/PwJdKv7ey9nzsaw/AdII5x1PxoxjsdEFtBLs8zMTFJSUti7dy+apvHWW2+xYsUKAI4fP05bWxtWq5Xi4mJPS2nTpk3MnTs3kGEJQsCoZW96khC4Z+ZrFRtCGNH4EPC7Zr///e+58cYbKSoqYvbs2Vx88cUAvPjii2zdupWEhATuu+8+li9fzrJly9A0jTvvvDPQYQlCQOhd9SMqE7wF/K5ZaWkp5eXlPuUPP/yw5//f+c53RrT3kSCMdVJCLrQe9S0TzkhMehWEUWSYtRYpcWDgrpw9D2XypSGMaHwQw5cFYTRFJWBa+TB6d6O7g9qSGuqIxgWRiARh1EliBv5ZEpdmgiCEnGgRCcIYoatOoiv+gf1gNZIxGmX66iEX7A83IhEJwhihlv2NqPpNYLGgA67tzyJZUpHTpoU6tIATl2bCeVM1nX2VzXx5sJ6mjt5QhzNuaSf2ehfoOnrTvtAEE2SiRSScF6dL460va+hX3V8lSZK4rnQaRZPOfo5VpPO7aaMpNviBhIBoEQlDUjWNXUdO8O4XR/nyYD1Ol+ZT50htG+3dDs9jXdf5fG/NkOd0uFS2H6rnnzuPc6SmLSBxj1fK9NXuHUBOkqKTRB9RuFErP0M9/A44+pCz50FiyfAHRbi3Nh3hYFWr5/GR2ja+fsVsr81z+vqdPsf5KwP3JdyfP9xHVaOVVmsfDqdK4YRk7rlmAUlxUX6PiSRy1ly65v+AeEMbGKKQ85ZFTIsoIhKR1nIE164XQdcBUKs2YYm3wtSZIY5s7LLZXV5JCKC60UpLRy/pSTF099nZuLeGvceaqWvrI6FfJz3JgiRJFGQn+j1nTZOVhtZuGlq7caru1tXhmlb+suEA91yzAEkSu+SqMVkoBZHRChosIhKR3nLIk4ROMVqPhSia8cGlqn7LnarGp7ur+XDbMerbelBkibgoIzaHi64+OyXFE1m12P8yFg6XRr9DxWZ3ounurcCMBoWWzj6svXYSY0WrKFJFRCLCHO9TpBn9dAwKHnEWMzlpcdS3dHvKkuOjQYdN+2ros7sA9+VWb7+LKbkpZKXEcl3p0Leac9Pj6Ortx+4cSHKKLCHJMmajErgfRhjzIqKzWp54gfeQe9lA/8QVoQtonLjl0lksmJZFSrwFs1EhMTaKLw/VAzqKMnAZ5dJ0dF0nNtqE3emivrWbXj/9RLIkkRQXjSXKiCRJGA0ykiRTmJNMtHlkC4eFC63jOM7Nv8Lx8X/i2v0qOPuGPyiMRUSLSDJGY1r5CFrdDnRHH3JmMc6WyP7gTzlW386x+g4sZiMLp2VhiRpICDFRRi6em8cfq3dhd6pUNnRgd6r02BwkxkbR1WvH4dIwKGA2GshLT+A3/7uDfocLWZa4ZF4+y4smeM7nVDVMRoVpuSl09zlwuFTMRoWS4pxQ/OihY+/CtfEJdId7zJXaWQO2DgzL7g9xYKETEYkIAMWEPPhWaMv4W05ztG0/3MCH2yo8j3eVN3L31fOJNg98LSrq271aN2ajgsulYFQUslJikYFp2RauvmgOf3p3D/0O9yWbpun8a9dxZuSlknzyjlhctIkJafHUtXQRZzEBkBwXRVZKXBB+2rFDaznsSUKnqA17UDRXiCIKvYi4NBP823Gowetxd5+dwzXed8pO6+MHICHWTKzFCEgYjUYUScJgUHwux3SgtdO75XnzpTNZMDWT7NRYZk/O4PaVxRiUCPsaGgZ1yjtt6PZuJEVGkiK3nyxyWkQRQtd1yqpaqWmyEh9jZsHUzCH7X/wNUHS4vO+WTc5JwmI20mcfSDJOVaer147ZqKBqGpsPNRMdW4nJqOAY1BEtAWmJFq/zxUQZWX1B4bA/R3NHL/srm5EkiTlTMkiJjx72mPFCTpuBlJCDdnwzen8nAJrmQus4HuLIQkckojDz6e5qNu+vRdfdSWXbwXruu34xJsNAq8PhUjEZFKbnpbB9UKvIoMhMyUn2Ol9stIk7r5rD1gN19NgcTM5O4rM91fQ7XLhUjZomK3ani417q7GYjZiMBiTJfa5L5+ef00DFmuYuXvloH5rmbo59ebCeu66aS0ZymNzpVIzIU69Cq9+NpBiRzLFgTkTd+xfIWxfq6EJCJCIAXUer3YbWuBfM8RgKV4zLlfV0XWdXeSNOVaOhp
Qu7S6O6ycqz//iKe69dQNnxFj7eUUmf3UlWSixrlhViNho4Vt9OTJSJ5cUT/LY8UuKjWbNsoBVTVtVCbXMX1h47rpPJwmRQMJsMTJmQxOXzJxEfYybKdG5fr52HGzxJCMClanx19ARXLplyTucbi6R+K1JsuleZ3tMYomhCTyQiQD3yHq59f/U81qo2Y7zi50hRCSGM6hxIEqqq0Wa1YR902VXTZGXzvho276+j22ans6ef2pYuWjr7+PFty7hkXt4ZTurrikWTeeNfZbSc7P+JjTJ47rbZHSrpSefXcvF3yWh3+paNZ1KS73suJeYHP5AxIsJ6Cf1TKz/zeqzbu9Dqd4UmmPMgAdMmpuBwDdx9UWQJS5SRyoZOevsdVDdZae7oo7Wzj13ljbyz9ejQJxxCdmos31+7kJsvncmkzARS4sye6RmTs5PO++eYNjHFp2z6xGQ/NccvOXM2yvSvgezuoJYT8zDM/3qIowod0SIC0P1MZ9D8T3EY61ZfUMixhg7Ka9rQdJ0okwGb3UVOahxfHT2B3aGin7wVpusaH26vYM2ywrOe52UyKJQU5xJlMvDJtkNEW8zMyk9jefGE4Q8expzJ6TicLvZVtiBLEvOnZjJ94vi7VB6OYfbNGGZeg+60IUWfSuDdZzwmXIlEBMi5S1EPvztQYIhCyZkXuoDOg8kgc/fV83nstS3UNHdhd6pomk63zU5qYgx1J6dsSIDJaMClanT09JMcd253pRZOyyLZaDunbZKdLnffT6vVRlZyLHOmpKPIMpIksXhGDotnRMBAR0MUkkHMsROJCFCKrkcyx6Ke2IdkjkeZftW47Kw+xajIRJsN5GcmIEsSBkWmrKqVa0umUl7bjqppKIqMBCTFRRNvMQc9Rh14/ZMDVDdZPWVVjZ2svXB60GMRQk8kIkCSDSjTVqNMWx3qUEZFn92JrrsvnwZLiY/m5ktm8M+dlWgaxMeYWHvhjJAMKGzu6PVKQgBlVa1cucTlNbJbiAwR9YlrLYfRGnYjGS1Icn6owwmYpLhokuOjae+yecriLCYyk2O5evlUlhXl0tZlIyPJErKlN1yqhtOl4nBpmAwKRoOMrusnlx+JqK+lQAR94mr1Flzb/uh5HO+UUWPuRe+sRjLHI+dfiGSynOEM44cE3HLpTD7cVkFTRy+pCRZWLirwtHxSE6JJTQjtSOXKE53Ut3bjcGlIQGqChel5KWz4qorKhk5io01cPDePqbnhdbdM8C9yEtHRj70eG7prcX7yE6QY9yLv0rFPMK54GMkYHlMJUhMs3L6yONRh+GWzu/hsdzU5qfG0WftwuFRcqkq02ci+imYAemwO1n92kLuvWUBKfDQ2uwtJ4pwHSQpjW+R8qq5+r4dyfxsYB0a26j1NaA1fhe1i5S5V43B1G/uPt2B3OElLtLB0lv+R1IHS1WvncG0bnT39uFQNo0EmM2VgTeaqE51e9VVN51hdBx81VHKsvh2AmflpXFc6DUUWy8qGk4hJRHLOAtSugXlVkq77jpx22ghHDpfKS+/vpayqhfbufmRJIislhkPVbdx9zXxio00Bj6G5o5cX39+Dw6Wh6To1zV3kpMZ6OtSjTAbMJgVrj93ruCO1bRysbkWRwGwycLCqhZzUOC6YFZ639iXd5d7fTHMipc8Kmxb6cIZNRD09PXR1dZGdne0pKysrIykpyatsrDPMWoskKagNXyGZYug3pGJ0DSx5ISlG5IxZIYwwcA5WtdLU0UtXr3vbH03XaeuyERNl4mhdO/MKMwMew9ayehwnp27IkkR6ogVbvwtTrEJstImrl0+lu8/Ou18c8wy4TIgxs/1IA53d7tZsjNlIdmocJ9rCdNCfo4e4nU/hlE6uVRSVgOmS/0KKC/znE2pnvG/72GOPkZiYSE5ODitXrqSnpweA66+/nueeey4oAY4aWUFXndDXhtZRjSsmC2XK5UhxWchp0zGW/NB7OdkwUtVopaWzD7vThX5yMumpgePB2jnDZvdeqygmysiy4gn8+41L+PebljAlJ4l5hZncddUcLpufz7UlU0lLsngtiNRrd9LdZ3evnR2G1KP/RLG1DBT0W1EP/T10AQXRkC2iyspK/uu//ot/+7d/Izc3l6eeeop169bxj3/846xeQFVV1q9fT319Pddeey2TJ08+pzrnS638FPXIe57H5hPbkPLvwnTlL0b9tcaS8tp2th9qoLPHjqrp2FUX0SYj0RYDFrORKTnnPzdsJCZlJXK0rt2rbEp2kmelxlOyU+PITo3j+IlOjtS0EW0y0mdw4nBquFQNm0Ml4zwn1Y5Vep/vhpN6b6ufmuFnyER0+PBhdF3nD3/4AyaTiaVLl7J69Wp+9atfndULPPjgg5w4cYLLL7+cVatW8dlnn5GTk3PWdc6X3nzIp0xrOYQy7apRfZ1Q6ne46Op1txhO3arfVX6CKJNCemI0bd392B0yiXFRrFw4iZLZE4PSPwSwZEY2ff1O9lU2I0sS8wozmD053W/dD7dVsP1wA80dfXTbHMTHmNF0J6quE2NWWP/ZIS6dn09JcW5QYg8WKcV3mRMpdfhF5MLBkIno1NyhdevW8eMf/5hVq1bx85//nP/4j//AaBzZjgsOh4PXXnuNqqoqoqKiqK2t5Y033uCBBx44qzqjIsp30z/JT9l4tfPICT7eUYlL1Yg2G7iudDpTcpJwnlxxMSE2ioSTgxcXT89m1ZLRb3WeiSRJXDo/n0vn53uVd9scfFXeiN3hYkpOEpnJsWw/7L6pkJZkcc+F67KhSBKpcdHEnEycWw/UsbxoQlhtyqgUXIS94iss1v2AjpKzEMPMa0MdVlAM2Uc0ffp0nn76aT7//HM+//xzwN1y+fnPf47ZPLK5SdXV1WRlZREV5f4FKCws5NixY2ddZzQohVfA4Ltkkgmtrw3Hhw/i3Pxr9K6GoQ8e47p67Xy4vQLXyd1TbXYXf998BB2YkZfmVVcCpueNjXl0PTYHz/7jKzbuqebLg/W89s8DniQEYJBlctPjyUqJZWJGAmknd5IFcLpUNH8Lao9rEn2Tr8W09k+Y1/4JwwXfByU4LdZQO+Nds/vvv59169YRFzewy8JDDz3ED37wAw4ePDjsyTVt+MWshquzefNmtmzZ4lNeWXn2u3BI0+/B2HYQCZ2o+k30Vm47+cwxtNr9WBf+x5j74Efyc9a19dHT470rRF8flB0qJ9mkMDcvhsoT3SiKzKzcBLS+NiorffsjAh3n6Q5Ud9Da7j3fbPuB40iaSm//wJpKsWYFTXfR1+fwlOWlx1BdVRWUOIPteFVNqEMIumFv32/YsIHf/OY3vPTSS0yfPp3jx49z1113ceGFFzJ//vwzHpudnU1dXR26riNJEjU1NT4d0cPVKSkpoaSkxOuYJ5544pyWnXCbBfYuOo78FYtl8JQOJ0mJIKed63lHX2Vl5Yh+zpQMOxsPdjC4fRBtNjJrxlQkIAB9/15GGufpTvTWYrF47/JhiYnm6xfN4INtx2hq7yUjOYZViyfjdKl8uruarj47aYkxOF0u/r6zhZSEaFYsLCBr0MDI
0Y4zmEYjxvGQbE837LTradOmUVNTw4IFC/jud79LcXExO3bsYN684dfriYuLo7S0lMcee4zt27fzyiuvcP311wPwxhtvsGvXrjPWCRRdMeO+SPEmGcfnXLOEGDOXL5yEIrs/TrPRwNXLCv38hGNL4YRknxHSM/JSyUyO4c4r5/DQbcu488o5nkuzb6yazX1rF2Ht6ef4CSvdNgdVjVZe/+SAZ4ySMD4N2yKaN28e//znPyktLeW5557DYrGwYcMGlixZMqIXeP7553nkkUf43e9+xx/+8AcmTZp0TnVGk2QwY89agsW6b6AsfSZS4vi9C3PBrAnMnpxBZ3c/qd0YMjYAABupSURBVInRmI0GqhutbNpfS1+/k4KsRC6cO9FnaZBQSk+KYd1ls9haVk+/w8XU3GSWF535M+i2OWjq8L4M7e13cqK1m7zMcbbGuOAxbCLatGkTV1xxBSaTiQcffJAXXniBSy+9lFdffZW1a9cO+wLJyck89dRTPuW33HLLsHUCqa9wLWmGUvT2CqS4LJRJpfhrJY0nMVFGYk4uYt/R3c/rGw54FqJvbO+h3+niayPYUyyY8jITcKo6NruT/MyEYeeQRRkNSJLkGX19Skz0yO7kjjVaZy1a+fvo/V3ImcWgBPdu5lgx7KVZfX09F154Ifv37+fxxx+nrKyMVatWsW/fvuEOHeMklIlLMcy9DWXypSCPzy/yUCpPdPrshnGkZnQ7qM+XS9V46YN9/PVfZfxjSznPvL2LivqOMx5jNMgsmeE9tWjaxJRxuQGj3teO89NHUKs2ozXuw7Xnf4g+/kGowwqJYVtEV111lVfrJT09nTfffJPGxsjdg2k8MPpZddE4hi7LAA5WtdDQOjBvzKVqfLqnisnDjPZeuaiAguwkGtq6SY2PZkZ+2rhsy2qNe8Hp3Vlvav4qRNGE1rCJKD4+3m95Zmb4T8Qbz6bmJpMQY8ba657N3tfvINps4JWP9jEzP40F07JC/svbbRuYf9ZmtdFts9PQ1sPcKSdYOO3M8/6m5CQFbXpKwEi+fyz0CN3hK2KWAYk0USYD31kzn13lJ6hv6WFPRSM2u4uqRitVjVZcqsbSmaFdSmNiejySJNHeZaOt270Ei9kI7395DFXTyUi0kJkSG7aLoSlZc3GZ48He5SlzZi7y/F/XXEhyeP7sp4uMn9IP2daK89O/oLUdRbKkYJizDjlnYajDGlXRZgMlxbls2HWc6NN+mcuqWpg+MYXthxqw2V0UTkhiRl5qUKdM5KbHs2rxZF7+YA8S7s72tEQLje29vPrRPtKTYog2G7npkhnkZYThHbGoBEyX/QT16Edg70LKKMam56C1V6DufAGtsxYpLhPD/G8gZxSFOtqAithEFHv4L2iqe2az3tOM68tnMF75JJLFd5fR8cjhVPnyYB3ltR00dfRis3vvjqFpOn96dzc2u3sE896KJlYuKgh6K2nR9CyqGjs5WN2KhPsSsqvPTnKce8qPze7kg23H+PbX5nnGSYUTKTYdw7w7BgoqK3Bt/X/ofe6VCvTuRlxf/Bbj6qfDepG0yExEqhOluxYsAx+srjrR246FRSLSdZ1XPt7HriONdNscaLqOy6mRnRbn+QVPjI3iRFuP13FflZ8IyeXakpk5lNe2oWo6TlVHkSQSYqLQdWjq6OVYfQdtVhtzpmRw1ZIpyGG8TKxsa/ckoVN0Rx+6tRYpdWqIogq8yExEsgHd4OevS/Q47/w8qamj1zPyGNwrIhqNMqqmMWdKBrPyU2m12jhU7b3WjaqFZhLpxPR47r5mAYeqW7H29LPtUAMGRaa920ZXnx2LyYCq6XxV3khqgiXkfVuBpJvjQVZ8tjyXosN7N5Pwa+uOhCRhm3QlgwcwyjkLkFPG1mC/c6VpnNwfbIAkSSTFRrFi4SSm5CQzNTfFZ5T1rHzvmfrBlBIfTUlxLqsvKGTFyekqNrsLs0EmfdBCaHXNXWc4y/iny0YMRTfAoL46ZeoqpJixsWJCoERmiwiwZy3FVHwRWms5ckwaUvosrw9/PNI0nYqGDnpsDnenb1sv6skRyBazgezUOCxm98DN5Lgo7riimC/K6umzOynMSWLJGNlrvnT2RJbMzOGdrUc5UNns1YEeHxP87bGDTZn+NeSsOWgdVUjx2cjJ4T/aOmITEYAUn4MSPzZ++c6XS9X480f7qG9xDxDUgXmFmVQ1dqIDcyanc03JNK9jclLjuOGisbnXvMmgcPmCSdQ0ddHd5x4LlRgbxQVFE0IcWXBICbkoCeN37uPZiuhEFE4OVbd6khC4LzpjLSZ+ee/lbC2ro+pEJxv3VFNSnOt1qTOWJcSY+d51C6lsaEeSZAqyEjEaIrM3IdyJRBQmugctGnZKV6+dD7ZVsPvowHScioYO7r12oWdy7FB2H21k8/5abHYXUycks2rJ5JAMLDQZZKZPDO/+ESFSO6vDUO7JUcqDTUyP97kzZrO7qGzowO50cbSunapGq89M9trmLt794hgd3f30O1zsq2zmnzuPB/xnECKXSERh4tQoZYvZiCLLFE5I5sqlU5D9dMB32xz87m87+cuGMl75aB8vvL/Xa2Gx6ibf5FTZcOZZ8YJwPsSlWRhZND2LRdOz0DTdM+hv3tRMtuyv9dRJjI2iutFKb//AhNOG1m6vwYyn7qwNNtS2Q312J5/ub+TtnS3EWUxcPDePwgnhPeZFGH2iRRSGBo88vnR+Ptcsn8rsgnRKinO588o59Nh8+5M6B+05XzQpjdSEgWVzFVnmwjkT/b7We18c42hDNz02Byfaelj/2SHau2yj+NMIkUC0iMKcBMyZksGcKRmespzUOJ/pHclxUbz3xVFau2xkJcdy24oiqpus9NtdTM5JGnLhsYrTLtlcqsbxxs6w3RZaCAyRiCLQpfPz6eyxc6y+HUWWWTAtkx2HG2g72ZKpbrRS39rNnVfOGfZc0SYDnaeVDXdHThBOJxJRBIoyGbj18lk4nCqKItPS2cf2Q94bTNY2d9HVax92JHPp7Ims/9dAKspMjmFKjv8+opomK7XNXSTERjEjLyUsZ9ML50YkoghmMrrnmg21YL3Bz3Kzp5s/NRN7TxuqMZ44i4lZ+Wl+j9taVscng4YA5GcmcPuK4rCeSS+MnEhEAqmJFnLT46kdNKF0Wm4KlhFeYmUkRlNQMPR0BF3X2byv1qusqtFKTXMX+WILIAGRiATcHdq3XV7EtkP1tHXZyEyOHXbN6LOhajp2p+pT3md3+qktRCKRiATAfZlWOtv/LfrzZVBk8jITqDox0JdkMirkpvnfmEGIPKK3UAiK60qmMiMvFUuUkQlp8dxyyUziLP4HSQqRR7SIhKCIs5i58eIZoQ5DGKNEIhLOSNd16lq6UTWdCWlxI7qTJghnSyQiYUgOl8YrH+2lodU9CjsxNoqvX1FMYmxUiCMTwo348zYMreM46tGP0U7swb3uYeTYebjBk4QAOnv6+Wx3dQgjEsKVaBGdgVr+Ea69/wMnl8SQs+dhXP7v435t65Hq6O73KWv3UyYI50skoqHoOurBv3mSEIDWsBu
",
+ "image/svg+xml": ["[scatter plot: x2 vs x1 on roughly [-1, 1] x [-1, 1], points colored by y (0/1)]"],
+ "text/plain": ["[@vlplot circle-mark spec: x=x1, y=x2, color=y (nominal)]"]
+ },
+ "execution_count": 37, "metadata": {}, "output_type": "execute_result"
+ }
+ ],
+ "source": ["dfCircles |> @vlplot(:circle, x=:x1, y=:x2, color = :\"y:n\") "]
+ },
+ {
+ "cell_type": "markdown", "metadata": {},
+ "source": ["## Sampling from two interleaved half-circles\n", "\n", "```@docs\n", "make_moons\n", "```"]
+ },
+ {
+ "cell_type": "code", "execution_count": 38, "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": ["[HTML table omitted: 3 rows × 3 columns (x1, x2, y); same values as text/plain below]"],
+ "text/plain": [
+ "3×3 DataFrame\n",
+ "│ Row │ x1        │ x2        │ y    │\n",
+ "│     │ Float64   │ Float64   │ Cat… │\n",
+ "├─────┼───────────┼───────────┼──────┤\n",
+ "│ 1   │ 1.24069   │ -0.6675   │ 1    │\n",
+ "│ 2   │ 1.241     │ -0.632259 │ 1    │\n",
+ "│ 3   │ 0.0699098 │ 1.06216   │ 0    │"
+ ]
+ },
+ "execution_count": 38, "metadata": {}, "output_type": "execute_result"
+ }
+ ],
+ "source": ["X, y = make_moons(100; noise=0.05)\n", "dfHalfCircles = DataFrame(X)\n", "dfHalfCircles.y = y\n", "first(dfHalfCircles, 3)"]
+ },
+ {
+ "cell_type": "code", "execution_count": 39, "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/svg+xml": ["[scatter plot: two interleaved half-circles, x2 vs x1, points colored by y (0/1)]"],
+ "text/plain": ["[@vlplot circle-mark spec: x=x1, y=x2, color=y (nominal)]"]
+ },
+ "execution_count": 39, "metadata": {}, "output_type": "execute_result"
+ }
+ ],
+ "source": ["dfHalfCircles |> @vlplot(:circle, x=:x1, y=:x2, color = :\"y:n\") "]
+ },
+ {
+ "cell_type": "markdown", "metadata": {},
+ "source": ["## Regression data generated from noisy linear models\n", "\n", "```@docs\n", "make_regression\n", "```"]
+ },
+ {
+ "cell_type": "code", "execution_count": 40, "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "[HTML table omitted: 3 rows × 6 columns (x1, x2, x3, x4, x5, y); same values as text/plain below]
" + ], + "text/latex": [ + "\\begin{tabular}{r|cccccc}\n", + "\t& x1 & x2 & x3 & x4 & x5 & y\\\\\n", + "\t\\hline\n", + "\t& Float64 & Float64 & Float64 & Float64 & Float64 & Float64\\\\\n", + "\t\\hline\n", + "\t1 & 0.443821 & 0.136731 & -1.10758 & -0.504443 & 1.08749 & 0.215017 \\\\\n", + "\t2 & -0.727496 & 0.843299 & 0.468311 & -0.922993 & -0.297077 & -0.59015 \\\\\n", + "\t3 & -0.412518 & -1.26038 & 0.932722 & 0.116239 & -0.570425 & -0.712242 \\\\\n", + "\\end{tabular}\n" + ], + "text/plain": [ + "3×6 DataFrame\n", + "│ Row │ x1 │ x2 │ x3 │ x4 │ x5 │ y │\n", + "│ │ \u001b[90mFloat64\u001b[39m │ \u001b[90mFloat64\u001b[39m │ \u001b[90mFloat64\u001b[39m │ \u001b[90mFloat64\u001b[39m │ \u001b[90mFloat64\u001b[39m │ \u001b[90mFloat64\u001b[39m │\n", + "├─────┼───────────┼──────────┼──────────┼───────────┼───────────┼───────────┤\n", + "│ 1 │ 0.443821 │ 0.136731 │ -1.10758 │ -0.504443 │ 1.08749 │ 0.215017 │\n", + "│ 2 │ -0.727496 │ 0.843299 │ 0.468311 │ -0.922993 │ -0.297077 │ -0.59015 │\n", + "│ 3 │ -0.412518 │ -1.26038 │ 0.932722 │ 0.116239 │ -0.570425 │ -0.712242 │" + ] + }, + "execution_count": 40, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X, y = make_regression(100, 5; noise=0.5, sparse=0.2, outliers=0.1)\n", + "dfRegression = DataFrame(X)\n", + "dfRegression.y = y\n", + "first(dfRegression, 3)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Julia 1.4.2", + "language": "julia", + "name": "julia-1.4" + }, + "language_info": { + "file_extension": ".jl", + "mimetype": "application/julia", + "name": "julia", + "version": "1.4.2" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/v0.20.3/generating_synthetic_data/index.html b/v0.20.3/generating_synthetic_data/index.html new file mode 100644 index 000000000..d77b5ac50 --- /dev/null +++ b/v0.20.3/generating_synthetic_data/index.html @@ -0,0 +1,21 @@ + +Generating Synthetic Data · MLJ

Generating Synthetic Data

Here synthetic data means artificially generated data, with no reference to a "real world" data set. Not to be confused with "fake data" obtained by resampling from a distribution fit to some actual real data.

MLJ has a set of functions - make_blobs, make_circles, make_moons and make_regression (closely resembling functions of the same name in scikit-learn) - for generating synthetic data sets. These are useful for testing machine learning models (e.g., testing user-defined composite models; see Composing Models).
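Because these functions return data already in the form MLJ models expect (a table of features plus a target vector), a freshly generated data set can be passed straight to evaluate as a quick smoke test. The sketch below assumes the MLJDecisionTreeInterface.jl package happens to be in your load path, which is an assumption of the example and not a requirement of this page:

using MLJ
X, y = make_blobs(200, 3; centers=2)                                 # table X, categorical target y
Tree = @load DecisionTreeClassifier pkg=DecisionTree verbosity=0     # assumes the interface package is installed
evaluate(Tree(), X, y; resampling=CV(nfolds=3), measures=[accuracy], verbosity=0)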

Generating Gaussian blobs

MLJBase.make_blobsFunction
X, y = make_blobs(n=100, p=2; kwargs...)

Generate Gaussian blobs for clustering and classification problems.

Return value

By default, a table X with p columns (features) and n rows (observations), together with a corresponding vector of n Multiclass target observations y, indicating blob membership.

Keyword arguments

  • shuffle=true: whether to shuffle the resulting points,

  • centers=3: either a number of centers or a c x p matrix with c pre-determined centers,

  • cluster_std=1.0: the standard deviation(s) of each blob,

  • center_box=(-10. => 10.): the limits of the p-dimensional cube within which the cluster centers are drawn if they are not provided,

  • eltype=Float64: machine type of points (any subtype of AbstractFloat).

  • rng=Random.GLOBAL_RNG: any AbstractRNG object, or integer to seed a MersenneTwister (for reproducibility).

  • as_table=true: whether to return the points as a table (true) or a matrix (false). If false the target y has integer element type.

Example

X, y = make_blobs(100, 3; centers=2, cluster_std=[1.0, 3.0])
source
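As noted in the keyword list above, setting as_table=false returns raw arrays instead of a table, with an integer-valued target. A minimal sketch (the comments describe what the docstring implies, not output copied from a session):

X, y = make_blobs(100, 3; centers=2, as_table=false)
X isa AbstractMatrix      # true: a 100×3 matrix of Float64
eltype(y) <: Integer      # true: blob labels as integers, not CategoricalValues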
using MLJ, DataFrames
+X, y = make_blobs(100, 3; centers=2, cluster_std=[1.0, 3.0])
+dfBlobs = DataFrame(X)
+dfBlobs.y = y
+first(dfBlobs, 3)
3×4 DataFrame
 Row │ x1       x2        x3         y
     │ Float64  Float64   Float64    Cat…
─────┼─────────────────────────────────────
   1 │ 1.85126  0.740628  -0.637618  2
   2 │ 2.81756  -4.84712  2.90942    2
   3 │ 5.11089  -31.295   2.86176    1
using VegaLite
+dfBlobs |> @vlplot(:point, x=:x1, y=:x2, color = :"y:n") 

[scatter plot: x2 vs x1, points colored by blob label y]

dfBlobs |> @vlplot(:point, x=:x1, y=:x3, color = :"y:n") 

[scatter plot: x3 vs x1, points colored by blob label y]

Generating concentric circles

MLJBase.make_circlesFunction
X, y = make_circles(n=100; kwargs...)

Generate n labeled points close to two concentric circles for classification and clustering models.

Return value

By default, a table X with 2 columns and n rows (observations), together with a corresponding vector of n Multiclass target observations y. The target is either 0 or 1, corresponding to membership to the smaller or larger circle, respectively.

Keyword arguments

  • shuffle=true: whether to shuffle the resulting points,

  • noise=0: standard deviation of the Gaussian noise added to the data,

  • factor=0.8: ratio of the smaller radius over the larger one,

  • eltype=Float64: machine type of points (any subtype of AbstractFloat).

  • rng=Random.GLOBAL_RNG: any AbstractRNG object, or integer to seed a MersenneTwister (for reproducibility).

  • as_table=true: whether to return the points as a table (true) or a matrix (false). If false the target y has integer element type.

Example

X, y = make_circles(100; noise=0.5, factor=0.3)
source
using MLJ, DataFrames
+X, y = make_circles(100; noise=0.05, factor=0.3)
+dfCircles = DataFrame(X)
+dfCircles.y = y
+first(dfCircles, 3)
3×3 DataFrame
 Row │ x1         x2          y
     │ Float64    Float64     Cat…
─────┼──────────────────────────────
   1 │ -0.205633  -0.0774157  0
   2 │ 0.213513   0.916845    1
   3 │ -0.376265  0.0598127   0
using VegaLite
+dfCircles |> @vlplot(:circle, x=:x1, y=:x2, color = :"y:n") 

[scatter plot: x2 vs x1 showing two concentric circles, points colored by y]

Sampling from two interleaved half-circles

MLJBase.make_moonsFunction
    make_moons(n::Int=100; kwargs...)

Generates labeled two-dimensional points lying close to two interleaved semi-circles, for use with classification and clustering models.

Return value

By default, a table X with 2 columns and n rows (observations), together with a corresponding vector of n Multiclass target observations y. The target is either 0 or 1, corresponding to membership to the left or right semi-circle.

Keyword arguments

  • shuffle=true: whether to shuffle the resulting points,

  • noise=0.1: standard deviation of the Gaussian noise added to the data,

  • xshift=1.0: horizontal translation of the second center with respect to the first one.

  • yshift=0.3: vertical translation of the second center with respect to the first one.

  • eltype=Float64: machine type of points (any subtype of AbstractFloat).

  • rng=Random.GLOBAL_RNG: any AbstractRNG object, or integer to seed a MersenneTwister (for reproducibility).

  • as_table=true: whether to return the points as a table (true) or a matrix (false). If false the target y has integer element type.

Example

X, y = make_moons(100; noise=0.5)
source
using MLJ, DataFrames
+X, y = make_moons(100; noise=0.05)
+dfHalfCircles = DataFrame(X)
+dfHalfCircles.y = y
+first(dfHalfCircles, 3)
3×3 DataFrame
 Row │ x1         x2         y
     │ Float64    Float64    Cat…
─────┼─────────────────────────────
   1 │ -1.0192    0.0553981  0
   2 │ 0.0314934  0.124366   1
   3 │ 0.553424   0.833827   0
using VegaLite
+dfHalfCircles |> @vlplot(:circle, x=:x1, y=:x2, color = :"y:n") 

[scatter plot: x2 vs x1 showing two interleaved half-circles, points colored by y]

Regression data generated from noisy linear models

MLJBase.make_regressionFunction
make_regression(n, p; kwargs...)

Generate Gaussian input features and a linear response with Gaussian noise, for use with regression models.

Return value

By default, a tuple (X, y) where table X has p columns and n rows (observations), together with a corresponding vector of n Continuous target observations y.

Keywords

  • intercept=true: Whether to generate data from a model with intercept.

  • n_targets=1: Number of columns in the target.

  • sparse=0: Proportion of the generating weight vector that is sparse.

  • noise=0.1: Standard deviation of the Gaussian noise added to the response (target).

  • outliers=0: Proportion of the response vector to make as outliers by adding a random quantity with high variance. (Only applied if binary is false.)

  • as_table=true: Whether X (and y, if n_targets > 1) should be a table or a matrix.

  • eltype=Float64: Element type for X and y. Must subtype AbstractFloat.

  • binary=false: Whether the target should be binarized (via a sigmoid).

  • eltype=Float64: machine type of points (any subtype of AbstractFloat).

  • rng=Random.GLOBAL_RNG: any AbstractRNG object, or integer to seed a MersenneTwister (for reproducibility).

  • as_table=true: whether to return the points as a table (true) or a matrix (false).

Example

X, y = make_regression(100, 5; noise=0.5, sparse=0.2, outliers=0.1)
source
using MLJ, DataFrames
+X, y = make_regression(100, 5; noise=0.5, sparse=0.2, outliers=0.1)
+dfRegression = DataFrame(X)
+dfRegression.y = y
+first(dfRegression, 3)
3×6 DataFrame
 Row │ x1         x2         x3         x4        x5        y
     │ Float64    Float64    Float64    Float64   Float64   Float64
─────┼───────────────────────────────────────────────────────────────
   1 │ -0.254741  -0.197412  1.11067    0.548098  0.44178   0.704075
   2 │ -0.243255  0.668206   -0.708033  -0.86275  0.800767  0.283942
   3 │ 0.442836   -0.898281  0.453884   2.459     1.78226   0.231597
diff --git a/v0.20.3/getting_started/index.html b/v0.20.3/getting_started/index.html new file mode 100644 index 000000000..ba7f0e396 --- /dev/null +++ b/v0.20.3/getting_started/index.html @@ -0,0 +1,232 @@ + +Getting Started · MLJ

Getting Started

For an outline of MLJ's goals and features, see About MLJ.

This page introduces some MLJ basics, assuming some familiarity with machine learning. For a complete list of other MLJ learning resources, see Learning MLJ.

MLJ collects together the functionality provided by multiple packages. To learn how to install components separately, run using MLJ; @doc MLJ.

This section introduces only the most basic MLJ operations and concepts. It assumes MLJ has been successfully installed. See Installation if this is not the case.

Choosing and evaluating a model

The following code loads Fisher's famous iris data set as a named tuple of column vectors:

julia> using MLJ
julia> iris = load_iris();
julia> selectrows(iris, 1:3) |> pretty┌──────────────┬─────────────┬──────────────┬─────────────┬──────────────────────────────────┐ +│ sepal_length │ sepal_width │ petal_length │ petal_width │ target │ +│ Float64 │ Float64 │ Float64 │ Float64 │ CategoricalValue{String, UInt32} │ +│ Continuous │ Continuous │ Continuous │ Continuous │ Multiclass{3} │ +├──────────────┼─────────────┼──────────────┼─────────────┼──────────────────────────────────┤ +│ 5.1 │ 3.5 │ 1.4 │ 0.2 │ setosa │ +│ 4.9 │ 3.0 │ 1.4 │ 0.2 │ setosa │ +│ 4.7 │ 3.2 │ 1.3 │ 0.2 │ setosa │ +└──────────────┴─────────────┴──────────────┴─────────────┴──────────────────────────────────┘
julia> schema(iris)┌──────────────┬───────────────┬──────────────────────────────────┐ +│ names │ scitypes │ types │ +├──────────────┼───────────────┼──────────────────────────────────┤ +│ sepal_length │ Continuous │ Float64 │ +│ sepal_width │ Continuous │ Float64 │ +│ petal_length │ Continuous │ Float64 │ +│ petal_width │ Continuous │ Float64 │ +│ target │ Multiclass{3} │ CategoricalValue{String, UInt32} │ +└──────────────┴───────────────┴──────────────────────────────────┘

Because this data format is compatible with Tables.jl (and satisfies Tables.istable(iris) == true) many MLJ methods (such as selectrows, pretty and schema used above) as well as many MLJ models can work with it. However, as most new users are already familiar with the access methods particular to DataFrames (also compatible with Tables.jl) we'll put our data into that format here:

import DataFrames
+iris = DataFrames.DataFrame(iris);

Next, let's split the data "horizontally" into input and target parts, and specify an RNG seed, to force observations to be shuffled:

julia> y, X = unpack(iris, ==(:target); rng=123);
julia> first(X, 3) |> pretty┌──────────────┬─────────────┬──────────────┬─────────────┐ +│ sepal_length │ sepal_width │ petal_length │ petal_width │ +│ Float64 │ Float64 │ Float64 │ Float64 │ +│ Continuous │ Continuous │ Continuous │ Continuous │ +├──────────────┼─────────────┼──────────────┼─────────────┤ +│ 6.7 │ 3.3 │ 5.7 │ 2.1 │ +│ 5.7 │ 2.8 │ 4.1 │ 1.3 │ +│ 7.2 │ 3.0 │ 5.8 │ 1.6 │ +└──────────────┴─────────────┴──────────────┴─────────────┘

This call to unpack splits off any column whose name satisfies the predicate ==(:target) (here, just the :target column) into something called y, and gathers all the remaining columns into X.
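More than one predicate can be supplied, in which case unpack returns one object per predicate, followed by a final object collecting any leftover columns. A minimal sketch reusing the iris DataFrame from above (the particular split is for illustration only):

y, Xsepal, Xrest = unpack(iris,
                          ==(:target),
                          in([:sepal_length, :sepal_width]);
                          rng=123)
# y: the :target column; Xsepal: the two sepal columns; Xrest: everything else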

To list all models available in MLJ's model registry do models(). Listing the models compatible with the present data:

julia> models(matching(X,y))54-element Vector{NamedTuple{(:name, :package_name, :is_supervised, :abstract_type, :deep_properties, :docstring, :fit_data_scitype, :human_name, :hyperparameter_ranges, :hyperparameter_types, :hyperparameters, :implemented_methods, :inverse_transform_scitype, :is_pure_julia, :is_wrapper, :iteration_parameter, :load_path, :package_license, :package_url, :package_uuid, :predict_scitype, :prediction_type, :reporting_operations, :reports_feature_importances, :supports_class_weights, :supports_online, :supports_training_losses, :supports_weights, :transform_scitype, :input_scitype, :target_scitype, :output_scitype)}}:
+ (name = AdaBoostClassifier, package_name = MLJScikitLearnInterface, ... )
+ (name = AdaBoostStumpClassifier, package_name = DecisionTree, ... )
+ (name = BaggingClassifier, package_name = MLJScikitLearnInterface, ... )
+ (name = BayesianLDA, package_name = MLJScikitLearnInterface, ... )
+ (name = BayesianLDA, package_name = MultivariateStats, ... )
+ (name = BayesianQDA, package_name = MLJScikitLearnInterface, ... )
+ (name = BayesianSubspaceLDA, package_name = MultivariateStats, ... )
+ (name = CatBoostClassifier, package_name = CatBoost, ... )
+ (name = ConstantClassifier, package_name = MLJModels, ... )
+ (name = DecisionTreeClassifier, package_name = BetaML, ... )
+ ⋮
+ (name = SGDClassifier, package_name = MLJScikitLearnInterface, ... )
+ (name = SVC, package_name = LIBSVM, ... )
+ (name = SVMClassifier, package_name = MLJScikitLearnInterface, ... )
+ (name = SVMLinearClassifier, package_name = MLJScikitLearnInterface, ... )
+ (name = SVMNuClassifier, package_name = MLJScikitLearnInterface, ... )
+ (name = StableForestClassifier, package_name = SIRUS, ... )
+ (name = StableRulesClassifier, package_name = SIRUS, ... )
+ (name = SubspaceLDA, package_name = MultivariateStats, ... )
+ (name = XGBoostClassifier, package_name = XGBoost, ... )

In MLJ a model is a struct storing the hyperparameters of the learning algorithm indicated by the struct name (and nothing else). For common problems matching data to models, see Model Search and Preparing Data.
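Because each entry returned by models is just a named tuple of model traits, the query can be refined with ordinary Julia, for example keeping only pure-Julia probabilistic classifiers (a quick sketch using traits that appear in the listing above):

probabilistic = filter(models(matching(X, y))) do m
    m.prediction_type == :probabilistic && m.is_pure_julia
end
length(probabilistic)    # how many candidates remain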

To see the documentation for DecisionTreeClassifier (without loading its defining code) do

doc("DecisionTreeClassifier", pkg="DecisionTree")

Assuming the MLJDecisionTreeInterface.jl package is in your load path (see Installation) we can use @load to import the DecisionTreeClassifier model type, which we will bind to Tree:

julia> Tree = @load DecisionTreeClassifier pkg=DecisionTree[ Info: For silent loading, specify `verbosity=0`. 
+import MLJDecisionTreeInterface ✔
+MLJDecisionTreeInterface.DecisionTreeClassifier

(In this case, we need to specify pkg=... because multiple packages provide a model type with the name DecisionTreeClassifier.) Now we can instantiate a model with default hyperparameters:

julia> tree = Tree()DecisionTreeClassifier(
+  max_depth = -1,
+  min_samples_leaf = 1,
+  min_samples_split = 2,
+  min_purity_increase = 0.0,
+  n_subfeatures = 0,
+  post_prune = false,
+  merge_purity_threshold = 1.0,
+  display_depth = 5,
+  feature_importance = :impurity,
+  rng = Random._GLOBAL_RNG())

Important: DecisionTree.jl and most other packages implementing machine learning algorithms for use in MLJ are not MLJ dependencies. If such a package is not in your load path you will receive an error explaining how to add the package to your current environment. Alternatively, you can use the interactive macro @iload. For more on importing model types, see Loading Model Code.

Once instantiated, a model's performance can be evaluated with the evaluate method. Our classifier is a probabilistic predictor (check prediction_type(tree) == :probabilistic) which means we can specify a probabilistic measure (metric) like log_loss, as well as deterministic measures like accuracy (which are applied after computing the mode of each prediction):

julia> evaluate(tree, X, y,
+                resampling=CV(shuffle=true),
+                        measures=[log_loss, accuracy],
+                        verbosity=0)PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────────────────┬──────────────┬─────────────┬─────────┬─────────────────
+│ measure              │ operation    │ measurement │ 1.96*SE │ per_fold       ⋯
+├──────────────────────┼──────────────┼─────────────┼─────────┼─────────────────
+│ LogLoss(             │ predict      │ 1.92        │ 1.53    │ [1.44, 2.88, 2 ⋯
+│   tol = 2.22045e-16) │              │             │         │                ⋯
+│ Accuracy()           │ predict_mode │ 0.947       │ 0.0425  │ [0.96, 0.92, 1 ⋯
+└──────────────────────┴──────────────┴─────────────┴─────────┴─────────────────
+                                                                1 column omitted

Under the hood, evaluate calls lower level functions predict or predict_mode according to the type of measure, as shown in the output. We shall call these operations directly below.
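If you prefer not to rely on this automatic choice, an operation keyword can be used to pin the operation explicitly; the keyword name below is our reading of the evaluation API rather than something shown on this page, so treat the sketch as an assumption to be checked against ?evaluate:

evaluate(tree, X, y;
         resampling=CV(shuffle=true),
         measures=[accuracy],
         operation=predict_mode,    # assumed keyword: force the deterministic operation
         verbosity=0)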

For more on performance evaluation, see Evaluating Model Performance for details.

A preview of data type specification in MLJ

The target y above is a categorical vector, which is appropriate because our model is a decision tree classifier:

julia> typeof(y)CategoricalVector{String, UInt32, String, CategoricalValue{String, UInt32}, Union{}} (alias for CategoricalArray{String, 1, UInt32, String, CategoricalValue{String, UInt32}, Union{}})

However, MLJ models do not prescribe the machine types for the data they operate on. Rather, they specify a scientific type, which refers to the way data is to be interpreted, as opposed to how it is encoded:

julia> target_scitype(tree)AbstractVector{<:Finite} (alias for AbstractArray{<:Finite, 1})

Here Finite is an example of a "scalar" scientific type with two subtypes:

julia> subtypes(Finite)2-element Vector{Any}:
+ Multiclass
+ OrderedFactor

We use the scitype function to check how MLJ is going to interpret given data. Our choice of encoding for y works for DecisionTreeClassifier, because we have:

julia> scitype(y)AbstractVector{Multiclass{3}} (alias for AbstractArray{Multiclass{3}, 1})

and Multiclass{3} <: Finite. If we would encode with integers instead, we obtain:

julia> yint = int.(y);
julia> scitype(yint)AbstractVector{Count} (alias for AbstractArray{Count, 1})

and using yint in place of y in classification problems will fail. See also Working with Categorical Data.

For more on scientific types, see Data containers and scientific types below.

Fit and predict

To illustrate MLJ's fit and predict interface, let's perform our performance evaluations by hand, but using a simple holdout set, instead of cross-validation.

Wrapping the model in data creates a machine which will store training outcomes:

julia> mach = machine(tree, X, y)untrained Machine; caches model-specific representations of data
+  model: DecisionTreeClassifier(max_depth = -1, …)
+  args:
+    1:	Source @110 ⏎ Table{AbstractVector{Continuous}}
+    2:	Source @766 ⏎ AbstractVector{Multiclass{3}}

Training and testing on a hold-out set:

julia> train, test = partition(eachindex(y), 0.7); # 70:30 split
julia> fit!(mach, rows=train);[ Info: Training machine(DecisionTreeClassifier(max_depth = -1, …), …).
julia> yhat = predict(mach, X[test,:]);
julia> yhat[3:5]3-element UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}: + UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0) + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0) + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)
julia> log_loss(yhat, y[test])2.4029102259411435

Note that log_loss and cross_entropy are aliases for LogLoss() (which can be passed an optional keyword parameter, as in LogLoss(tol=0.001)). For a list of all losses and scores, and their aliases, run measures().
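Measures are ordinary callable objects, so a parameterized instance can be applied to predictions and ground truth in exactly the same way as the log_loss alias above; a small sketch:

m = LogLoss(tol=0.001)
m(yhat, y[test])    # same call pattern as log_loss(yhat, y[test])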

Notice that yhat is a vector of Distribution objects, because DecisionTreeClassifier makes probabilistic predictions. The methods of the Distributions.jl package can be applied to such distributions:

julia> broadcast(pdf, yhat[3:5], "virginica") # predicted probabilities of virginica3-element Vector{Float64}:
+ 0.0
+ 1.0
+ 1.0
julia> broadcast(pdf, yhat, y[test])[3:5] # predicted probability of observed class3-element Vector{Float64}: + 1.0 + 1.0 + 1.0
julia> mode.(yhat[3:5])3-element CategoricalArray{String,1,UInt32}: + "setosa" + "virginica" + "virginica"

Or, one can explicitly get modes by using predict_mode instead of predict:

julia> predict_mode(mach, X[test[3:5],:])3-element CategoricalArray{String,1,UInt32}:
+ "setosa"
+ "virginica"
+ "virginica"

Finally, we note that pdf() is overloaded to allow the retrieval of probabilities for all levels at once:

julia> L = levels(y)3-element Vector{String}:
+ "setosa"
+ "versicolor"
+ "virginica"
julia> pdf(yhat[3:5], L)3×3 Matrix{Float64}: + 1.0 0.0 0.0 + 0.0 0.0 1.0 + 0.0 0.0 1.0

Unsupervised models have a transform method instead of predict, and may optionally implement an inverse_transform method:

julia> v = Float64[1, 2, 3, 4]4-element Vector{Float64}:
+ 1.0
+ 2.0
+ 3.0
+ 4.0
julia> stand = Standardizer() # this type is built-inStandardizer( + features = Symbol[], + ignore = false, + ordered_factor = false, + count = false)
julia> mach2 = machine(stand, v)untrained Machine; caches model-specific representations of data + model: Standardizer(features = Symbol[], …) + args: + 1: Source @136 ⏎ AbstractVector{Continuous}
julia> fit!(mach2)[ Info: Training machine(Standardizer(features = Symbol[], …), …). +trained Machine; caches model-specific representations of data + model: Standardizer(features = Symbol[], …) + args: + 1: Source @136 ⏎ AbstractVector{Continuous}
julia> w = transform(mach2, v)4-element Vector{Float64}: + -1.161895003862225 + -0.3872983346207417 + 0.3872983346207417 + 1.161895003862225
julia> inverse_transform(mach2, w)4-element Vector{Float64}: + 1.0 + 2.0 + 3.0 + 4.0

Machines have an internal state which allows them to avoid redundant calculations when retrained, in certain conditions - for example when increasing the number of trees in a random forest, or the number of epochs in a neural network. The machine-building syntax also anticipates a more general syntax for composing multiple models, an advanced feature explained in Learning Networks.
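To make the warm-restart idea concrete, here is a sketch using the built-in EnsembleModel wrapper; the hyperparameter names (model, n) and the exact amount of computation saved are assumptions about that wrapper, so consult its docstring before relying on this:

forest = EnsembleModel(model=Tree(), n=10)
mach_forest = machine(forest, X, y)
fit!(mach_forest, verbosity=0)    # trains 10 atomic models
forest.n = 15
fit!(mach_forest, verbosity=0)    # only the 5 additional atomic models should need training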

There is a version of evaluate for machines as well as models. This time we'll use a simple holdout strategy as above. (An exclamation point is added to the method name because machines are generally mutated when trained.)

julia> evaluate!(mach, resampling=Holdout(fraction_train=0.7),
+                       measures=[log_loss, accuracy],
+                       verbosity=0)PerformanceEvaluation object with these fields:
+  model, measure, operation, measurement, per_fold,
+  per_observation, fitted_params_per_fold,
+  report_per_fold, train_test_rows, resampling, repeats
+Extract:
+┌──────────────────────┬──────────────┬─────────────┬──────────┐
+│ measure              │ operation    │ measurement │ per_fold │
+├──────────────────────┼──────────────┼─────────────┼──────────┤
+│ LogLoss(             │ predict      │ 2.4         │ [2.4]    │
+│   tol = 2.22045e-16) │              │             │          │
+│ Accuracy()           │ predict_mode │ 0.933       │ [0.933]  │
+└──────────────────────┴──────────────┴─────────────┴──────────┘

Changing a hyperparameter and re-evaluating:

julia> tree.max_depth = 33
julia> evaluate!(mach, resampling=Holdout(fraction_train=0.7), + measures=[log_loss, accuracy], + verbosity=0)PerformanceEvaluation object with these fields: + model, measure, operation, measurement, per_fold, + per_observation, fitted_params_per_fold, + report_per_fold, train_test_rows, resampling, repeats +Extract: +┌──────────────────────┬──────────────┬─────────────┬──────────┐ +│ measure │ operation │ measurement │ per_fold │ +├──────────────────────┼──────────────┼─────────────┼──────────┤ +│ LogLoss( │ predict │ 1.61 │ [1.61] │ +│ tol = 2.22045e-16) │ │ │ │ +│ Accuracy() │ predict_mode │ 0.956 │ [0.956] │ +└──────────────────────┴──────────────┴─────────────┴──────────┘

Next steps

For next steps, consult the Learning MLJ section. At the least, we recommend you read the remainder of this page before considering serious use of MLJ.

Data containers and scientific types

The MLJ user should acquaint themselves with some basic assumptions about the form of data expected by MLJ, as outlined below. The basic machine constructors look like this (see also Constructing machines):

machine(model::Unsupervised, X)
+machine(model::Supervised, X, y)

Each supervised model in MLJ declares the permitted scientific type of the inputs X and targets y that can be bound to it in the first constructor above, rather than specifying specific machine types (such as Array{Float32, 2}). Similar remarks apply to the input X of an unsupervised model.

Scientific types are julia types defined in the package ScientificTypesBase.jl; the package ScientificTypes.jl implements the particular convention used in the MLJ universe for assigning a specific scientific type (interpretation) to each julia object (see the scitype examples below).

The basic "scalar" scientific types are Continuous, Multiclass{N}, OrderedFactor{N}, Count and Textual. Missing and Nothing are also considered scientific types. Be sure you read Scalar scientific types below to guarantee your scalar data is interpreted correctly. Tools exist to coerce the data to have the appropriate scientific type; see ScientificTypes.jl or run ?coerce for details.
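For instance, coerce accepts column => scitype pairs, so re-interpreting selected columns of a table looks like this (df below is a hypothetical toy DataFrame, not data used elsewhere on this page):

df = DataFrames.DataFrame(age=[18, 25, 40], gender=["m", "f", "f"])   # hypothetical data
df_coerced = coerce(df, :age => Continuous, :gender => Multiclass)
schema(df_coerced)    # :age is now Continuous, :gender is Multiclass{2}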

Additionally, most data containers - such as tuples, vectors, matrices and tables - have a scientific type parameterized by the scitype of the elements they contain.

Figure 1. Part of the scientific type hierarchy in ScientificTypesBase.jl.

julia> scitype(4.6)Continuous
julia> scitype(42)Count
julia> x1 = coerce(["yes", "no", "yes", "maybe"], Multiclass);
julia> scitype(x1)AbstractVector{Multiclass{3}} (alias for AbstractArray{Multiclass{3}, 1})
julia> X = (x1=x1, x2=rand(4), x3=rand(4)) # a "column table"(x1 = CategoricalValue{String, UInt32}["yes", "no", "yes", "maybe"], + x2 = [0.17296982732153476, 0.4311265688549152, 0.9164218371622808, 0.0029817110637152533], + x3 = [0.7422777179776697, 0.5491931062469285, 0.05065936857102282, 0.14872233376483412],)
julia> scitype(X)Table{Union{AbstractVector{Continuous}, AbstractVector{Multiclass{3}}}}

Two-dimensional data

Generally, two-dimensional data in MLJ is expected to be tabular. All data containers X compatible with the Tables.jl interface and satisfying Tables.istable(X) == true (most of the formats in this list) have the scientific type Table{K}, where K depends on the scientific types of the columns, which can be individually inspected using schema:

julia> schema(X)┌───────┬───────────────┬──────────────────────────────────┐
+│ names │ scitypes      │ types                            │
+├───────┼───────────────┼──────────────────────────────────┤
+│ x1    │ Multiclass{3} │ CategoricalValue{String, UInt32} │
+│ x2    │ Continuous    │ Float64                          │
+│ x3    │ Continuous    │ Float64                          │
+└───────┴───────────────┴──────────────────────────────────┘

Matrix data

MLJ models expecting a table do not generally accept a matrix instead. However, a matrix can be wrapped as a table, using MLJ.table:

julia> matrix_table = MLJ.table(rand(2,3));
julia> schema(matrix_table)┌───────┬────────────┬─────────┐ +│ names │ scitypes │ types │ +├───────┼────────────┼─────────┤ +│ x1 │ Continuous │ Float64 │ +│ x2 │ Continuous │ Float64 │ +│ x3 │ Continuous │ Float64 │ +└───────┴────────────┴─────────┘

The matrix is not copied, only wrapped. To manifest a table as a matrix, use MLJ.matrix.
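For the reverse direction, a short sketch reusing matrix_table from above:

A = MLJ.matrix(matrix_table)     # recover a 2×3 Matrix{Float64}
A == MLJ.matrix(MLJ.table(A))    # true: wrapping and unwrapping round-trips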

Observations correspond to rows, not columns

When supplying models with matrices, or wrapping them in tables, each row should correspond to a different observation. That is, the matrix should be n x p, where n is the number of observations and p the number of features. However, some models may perform better if supplied the adjoint of a p x n matrix instead, and observation resampling is always more efficient in this case.
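In other words, if raw data arrives with features as rows, one can wrap the adjoint rather than permuting the data; a minimal sketch:

Xraw = rand(3, 100)       # 3 features (rows) × 100 observations (columns)
Xobs = MLJ.table(Xraw')   # wrap the adjoint: a 100-row, 3-column table, no copy
schema(Xobs)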

Inputs

Since an MLJ model only specifies the scientific type of data, if that type is Table - which is the case for the majority of MLJ models - then any Tables.jl container X is permitted, so long as Tables.istable(X) == true.

Specifically, the requirement for an arbitrary model's input is scitype(X) <: input_scitype(model).

Targets

The target y expected by MLJ models is generally an AbstractVector. A multivariate target y will generally be a table.

Specifically, the type requirement for a model target is scitype(y) <: target_scitype(model).
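For example, here is a minimal check of a candidate target against a model's declared requirement (a sketch only, with MLJ loaded; ConstantClassifier is a baseline classifier shipped with MLJ via MLJModels, and target_scitype is exported by MLJ):

y = coerce(["yes", "no", "yes"], Multiclass)
scitype(y) <: target_scitype(ConstantClassifier())   # true: y is an admissible target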

Querying a model for acceptable data types

Given a model name (and the name of the package providing it), one can inspect the admissible scientific types of its input and target, without loading the code defining the model:

julia> i = info("DecisionTreeClassifier", pkg="DecisionTree")
(name = "DecisionTreeClassifier",
 package_name = "DecisionTree",
 is_supervised = true,
 abstract_type = Probabilistic,
 deep_properties = (),
 docstring = "```\nDecisionTreeClassifier\n```\n\nA model type for c...",
 fit_data_scitype =
     Tuple{Table{<:Union{AbstractVector{<:Continuous}, AbstractVector{<:Count}, AbstractVector{<:OrderedFactor}}}, AbstractVector{<:Finite}},
 human_name = "CART decision tree classifier",
 hyperparameter_ranges = (nothing,
                          nothing,
                          nothing,
                          nothing,
                          nothing,
                          nothing,
                          nothing,
                          nothing,
                          nothing,
                          nothing),
 hyperparameter_types = ("Int64",
                         "Int64",
                         "Int64",
                         "Float64",
                         "Int64",
                         "Bool",
                         "Float64",
                         "Int64",
                         "Symbol",
                         "Union{Integer, Random.AbstractRNG}"),
 hyperparameters = (:max_depth,
                    :min_samples_leaf,
                    :min_samples_split,
                    :min_purity_increase,
                    :n_subfeatures,
                    :post_prune,
                    :merge_purity_threshold,
                    :display_depth,
                    :feature_importance,
                    :rng),
 implemented_methods = [:clean!,
                        :fit,
                        :fitted_params,
                        :predict,
                        :reformat,
                        :selectrows,
                        :feature_importances],
 inverse_transform_scitype = Unknown,
 is_pure_julia = true,
 is_wrapper = false,
 iteration_parameter = nothing,
 load_path = "MLJDecisionTreeInterface.DecisionTreeClassifier",
 package_license = "MIT",
 package_url = "https://github.com/bensadeghi/DecisionTree.jl",
 package_uuid = "7806a523-6efd-50cb-b5f6-3fa6f1930dbb",
 predict_scitype =
     AbstractVector{ScientificTypesBase.Density{_s25} where _s25<:Finite},
 prediction_type = :probabilistic,
 reporting_operations = (),
 reports_feature_importances = true,
 supports_class_weights = false,
 supports_online = false,
 supports_training_losses = false,
 supports_weights = false,
 transform_scitype = Unknown,
 input_scitype =
     Table{<:Union{AbstractVector{<:Continuous}, AbstractVector{<:Count}, AbstractVector{<:OrderedFactor}}},
 target_scitype = AbstractVector{<:Finite},
 output_scitype = Unknown)

julia> i.input_scitype
Table{<:Union{AbstractVector{<:Continuous}, AbstractVector{<:Count}, AbstractVector{<:OrderedFactor}}}

julia> i.target_scitype
AbstractVector{<:Finite} (alias for AbstractArray{<:Finite, 1})

This output indicates that any table with Continuous, Count or OrderedFactor columns is acceptable as the input X, and that any vector with element scitype <: Finite is acceptable as the target y.
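As a quick sanity check, one can compare the scitypes of candidate data against these traits directly (a sketch only, with MLJ loaded and i as returned above; Xnew and ynew are made up for illustration):

Xnew = (x1 = rand(100), x2 = rand(100), x3 = rand(100))
ynew = coerce(rand(["a", "b"], 100), Multiclass)
scitype(Xnew) <: i.input_scitype    # true
scitype(ynew) <: i.target_scitype   # true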

For more on matching models to data, see Model Search.
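For instance, with the Xnew and ynew just constructed, the registry can be searched for all models whose declared scitypes accept them (a minimal sketch using models and matching, which belong to the model search API described there):

models(matching(Xnew, ynew))   # metadata entries of models compatible with Xnew and ynew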

Scalar scientific types

Models in MLJ will always apply the MLJ convention described in ScientificTypes.jl to decide how to interpret the elements of your container types. Here are the key features of that convention:

  • Any AbstractFloat is interpreted as Continuous.

  • Any Integer is interpreted as Count.

  • Any CategoricalValue x, is interpreted as Multiclass or OrderedFactor, depending on the value of isordered(x).

  • Strings and Chars are not interpreted as Multiclass or OrderedFactor (they have scitypes Textual and Unknown respectively).

  • In particular, integers (including Bools) cannot be used to represent categorical data. Use the preceding coerce operations to coerce to a Finite scitype.

  • The scientific types of nothing and missing are Nothing and Missing, native types we also regard as scientific.

Use coerce(v, OrderedFactor) or coerce(v, Multiclass) to coerce a vector v of integers, strings or characters to a vector with an appropriate Finite (categorical) scitype. See also Working with Categorical Data, and the ScientificTypes.jl documentation.
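A short sketch of such coercions (with MLJ or ScientificTypes loaded; the vectors are made up):

v = coerce([1, 2, 2, 3], OrderedFactor)          # scitype is now AbstractVector{OrderedFactor{3}}
w = coerce(["low", "high", "low"], Multiclass)   # scitype is now AbstractVector{Multiclass{2}}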

diff --git a/v0.20.3/glossary/index.html b/v0.20.3/glossary/index.html new file mode 100644 index 000000000..bbb45fe12 --- /dev/null +++ b/v0.20.3/glossary/index.html @@ -0,0 +1,2 @@ + +Glossary · MLJ

Glossary

Note: This glossary includes some detail intended mainly for MLJ developers.

Basics

hyperparameters

Parameters on which some learning algorithm depends, specified before the algorithm is applied, and where learning is interpreted in the broadest sense. For example, PCA feature reduction is a "preprocessing" transformation "learning" a projection from training data, governed by a dimension hyperparameter. Hyperparameters in our sense may specify configuration (eg, number of parallel processes) even when this does not affect the end-product of learning. (But we exclude verbosity level.)

model (object of abstract type Model)

Object collecting together hyperparameters of a single algorithm. Models are classified either as supervised or unsupervised models (eg, "transformers"), with corresponding subtypes Supervised <: Model and Unsupervised <: Model.

fitresult (type generally defined outside of MLJ)

Also known as "learned" or "fitted" parameters, these are "weights", "coefficients", or similar parameters learned by an algorithm, after adopting the prescribed hyper-parameters. For example, decision trees of a random forest, the coefficients and intercept of a linear model, or the projection matrices of a PCA dimension-reduction algorithm.

operation

Data-manipulating operations (methods) using some fitresult. For supervised learners, the predict, predict_mean, predict_median, or predict_mode methods; for transformers, the transform or inverse_transform method. An operation may also refer to an ordinary data-manipulating method that does not depend on a fit-result (e.g., a broadcasted logarithm), which is then called a static operation for clarity. An operation that is not static is dynamic.

machine (object of type Machine)

An object consisting of:

(1) A model

(2) A fit-result (undefined until training)

(3) Training arguments (one for each data argument of the model's associated fit method). A training argument is data used for training (subsampled by specifying rows=... in fit!) but also in evaluation (subsampled by specifying rows=... in predict, predict_mean, etc). Generally, there are two training arguments for supervised models, and just one for unsupervised models. Each argument is either a Source node, wrapping concrete data supplied to the machine constructor, or a Node, in the case of a learning network (see below). Both kinds of nodes can be called with an optional rows=... keyword argument to (lazily) return concrete data.

In addition, machines store "report" metadata, for recording algorithm-specific statistics of training (eg, an internal estimate of generalization error, feature importances); and they cache information allowing the fit-result to be updated without unnecessary repetition of computations.

Machines are trained by calls to a fit! method which may be passed an optional argument specifying the rows of data to be used in training.

For more, see the Machines section.
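The following minimal sketch ties these pieces together (it assumes MLJ is loaded; ConstantClassifier is a built-in baseline model and @load_iris ships with MLJ):

using MLJ
X, y = @load_iris                            # concrete data for the training arguments
mach = machine(ConstantClassifier(), X, y)   # model + training arguments; fit-result still undefined
fit!(mach, rows=1:120)                       # compute the fit-result on a row subsample
predict(mach, rows=121:150)                  # an operation, applied to other rows
report(mach)                                 # training metadata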

Learning Networks and Composite Models

Note: Multiple machines in a learning network may share the same model, and multiple learning nodes may share the same machine.

source node (object of type Source)

A container for training data and point of entry for new data in a learning network (see below).

node (object of type Node)

Essentially a machine (whose arguments are possibly other nodes) wrapped in an associated operation (e.g., predict or inverse_transform). It consists primarily of:

  1. An operation, static or dynamic.
  2. A machine, or nothing if the operation is static.
  3. Upstream connections to other nodes, specified by a list of arguments (one for each argument of the operation). These are the arguments on which the operation "acts" when the node N is called, as in N().

learning network

A directed acyclic graph implicit in the connections of a collection of source(s) and nodes.
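A minimal learning-network sketch, assuming MLJ is loaded (Standardizer is a built-in transformer; the wrapped data is made up):

using MLJ
Xs = source((x1 = rand(10), x2 = rand(10)))  # source node wrapping concrete data
mach = machine(Standardizer(), Xs)           # a machine whose training argument is a node
N = transform(mach, Xs)                      # a node: the machine wrapped in the transform operation
fit!(N)                                      # trains all machines upstream of N
N()                                          # calling the node returns the transformed data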

wrapper

Any model with one or more other models as hyper-parameters.

composite model

Any wrapper, or any learning network, "exported" as a model (see Composing Models).

diff --git a/v0.20.3/homogeneous_ensembles/index.html b/v0.20.3/homogeneous_ensembles/index.html new file mode 100644 index 000000000..87bce5893 --- /dev/null +++ b/v0.20.3/homogeneous_ensembles/index.html @@ -0,0 +1,8 @@ + +Homogeneous Ensembles · MLJ

Homogeneous Ensembles

Although an ensemble of models sharing a common set of hyperparameters can be defined using the learning network API, MLJ's EnsembleModel model wrapper is preferred, for convenience and best performance. Examples of using EnsembleModel are given in this Data Science Tutorial.

When bagging decision trees, further randomness is normally introduced by subsampling features, when training each node of each tree (Ho (1995), Breiman and Cutler (2001)). A bagged ensemble of such trees is known as a Random Forest. You can see an example of using EnsembleModel to build a random forest in this Data Science Tutorial. However, you may also want to use a canned random forest model. Run models("RandomForest") to list such models.

MLJEnsembles.EnsembleModel - Function
EnsembleModel(model,
              atomic_weights=Float64[],
              bagging_fraction=0.8,
              n=100,
              rng=GLOBAL_RNG,
              acceleration=CPU1(),
              out_of_bag_measure=[])

Create a model for training an ensemble of n clones of model, with optional bagging. Ensembling is useful if fit!(machine(atom, data...)) does not create identical models on repeated calls (ie, is a stochastic model, such as a decision tree with randomized node selection criteria), or if bagging_fraction is set to a value less than 1.0, or both.

Here the atomic model must support targets with scitype AbstractVector{<:Finite} (single-target classifiers) or AbstractVector{<:Continuous} (single-target regressors).

If rng is an integer, then MersenneTwister(rng) is the random number generator used for bagging. Otherwise some AbstractRNG object is expected.

The atomic predictions are optionally weighted according to the vector atomic_weights (to allow for external optimization) except in the case that model is a Deterministic classifier, in which case atomic_weights are ignored.

The ensemble model is Deterministic or Probabilistic, according to the corresponding supertype of atom. In the case of deterministic classifiers (target_scitype(atom) <: AbstractVector{<:Finite}), the predictions are majority votes, and for regressors (target_scitype(atom) <: AbstractVector{<:Continuous}) they are ordinary averages. Probabilistic predictions are obtained by averaging the atomic probability distribution/mass functions; in particular, for regressors, the ensemble prediction on each input pattern has the type MixtureModel{VF,VS,D} from the Distributions.jl package, where D is the type of predicted distribution for atom.

Specify acceleration=CPUProcesses() for distributed computing, or CPUThreads() for multithreading.

If a single measure or non-empty vector of measures is specified by out_of_bag_measure, then out-of-bag estimates of performance are written to the training report (call report on the trained machine wrapping the ensemble model).

Important: If per-observation or class weights w (not to be confused with atomic weights) are specified when constructing a machine for the ensemble model, as in mach = machine(ensemble_model, X, y, w), then w is used by any measures specified in out_of_bag_measure that support them.
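A usage sketch following the signature above (it assumes MLJDecisionTreeInterface is installed so that the atomic model can be loaded; log_loss is a standard MLJ measure):

using MLJ
Tree = @load DecisionTreeClassifier pkg=DecisionTree
X, y = @load_iris
forest = EnsembleModel(Tree(), n=100, bagging_fraction=0.7,
                       out_of_bag_measure=[log_loss])
mach = machine(forest, X, y)
fit!(mach)
report(mach)   # contains the out-of-bag estimate of log_loss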

z;9(<$94!m+hR39o>We}`Wqn1Gl9HR>$vx9(^xYKm^761r=)uy`ZYwG( zqL~PQ6b#&R0@Zz`_L-KI8xIRD`qg0mQ!cg~RMFANf;sN<=g&Nc0BUwb_1!t+lHd-l z4NA4{jEGz`7X~hgJxNWSBF*UDVUfG}nT$a=?cB|0ll81|9nceWX1GP=b#$WVOC}iH z*T4J5%KBV=_@xDY^Ul1R>y+U;yD24ipc@bu0ga5;cKnO1ur55}u3tcaowhKu21J&i z^^f7;FAw)f1DP_|P*WJyBL!?zSnj;JYt;XA;#!=rvl8k)Ftbe%o5N^Zx;ErWMQKx6 z|CI6`jD@_$O=rGF@-^1BL$bZ+-FZVYQ}_A;-Sx-urS>x^Nl8qw^BC@`t$e0Se0-hu z#^0dkBDg&f=cxx2kPM7rOJj}97^wGXc7|EH6QJ*LdO;N+G?sPFzvGYvsy5U}^M#*Z z>yrZ{9Xa}9n2E9vvx{j^5L?7MAO5(@*k26IGR|Yuw6LFx!FQ!>cw4>-Ke&)#am}KS z7e1cd;1VRFp<8O#8m{kg2~+z0+3Dq9al1M|!nVPU-~s}ULF1@S4TkkG;SgRmK(qSB z#s;>jdD;V}>olm4SOnut;88se!+^&tTI17kpMeAaMHDby=|Ux5Cr1d1nuzP_MOId6 zz%7bUL8wlg7zrnW4l***95FC+voKu`YNXW5GwEu<;*qT@dlCftSx-T0g-NWSGkWjgExT?Jkt|Hedtb zY8M)9$jgCuT46W~RXq3S8-qX?QLxK1`ny`FS`T8`4FfnDm5hvxwxyw%)6pkt3H#r@ z(c)cr@6#dA8c4oxHUi|XMF*gAccdwz(u4AQ3fe^JTv5Lrj6Mh10P6i2KgwUX4gCjI zg9}qzv{^^5mH+{}uIx=Wx>K>>u3+Ac>*VX!xnLMT#weQQ<+VRZNa)N<7jnYl$s5*B z8Gal!$8U~?8Hm8ZvzfEI$<3*eD;{Pv)g-i#<-mFfeVx;Im;o9*5M5kg93&M;m%Q<7 zVg^6F>k)q+ejlg;m*c_!i>HEEmwO+H2i?~O&AM@e%e)Fquuou9gP$p{2!@D_53l#dca(b~1A++YXQm$LnwXps9?7PEKl59c$nW7;9r` zEIe8VgoO+kP=T;9F6>9m^pbK4>JpJWhR;|OVs(${uK{Wu2G*o*vGL{6(d-%j0kN!StfGovpc%-l^Z zK!cf=W6e%?zR|{5+>%y7qz48MZ;Dr{r-%Vvf-3Ue-H2)|nCctcd%1}i^ zW73)|d#o`WVIQ;33{`huz^acjs>y6}*m-z(U_NclP|ai_B>L^!ccCVQ?y;0!OAxDp zXAE<>sll3 zp~OP-Zp43@nws_j4LtvTglrqS0RpUBr>?Fa ztPN!%ZrUmY%EgVp;N}2c3yU_5Lw>Tcvj;Pwl7b3e+$7|G;)4hEb+rr}j37<{)PpjE zcuDk@;$e?6-1{F#VxYQPRc@p_1cDubTP&LEP&4*83N4n`?YP4+#DgP_1vp3;m4FLC zGg}Sqpz}n`=1PtyCkBF2E7#!3@P63uT%n-DY7ZVH>3MG3&-E5aqdc%%U%aR5I#)P7 zSXK_pSqAW^7HNkONULc{_~6c@v{)OJx=sS%8+EAm_t@Z; zzFm{=e0z)bSUa!VYOgV3;Q4^Z_N;*f`(qiB($CLN(0M6I%n-Kw)yb_Bfi7+;O=y1( z9;IHSg&qTVZMGU%>qu0`aJpq|{QPRIt*w^*oQ-WS>4m06#GwzZ^Y)n10lK-=n6D5I zn@Euk?gbGLhjA1XCW;xL`(!?&Tx zXO?18YQC7YaDG~9xOxBTG% z`D9O?p=uG$$&*OB#I5a#i;JaSN6xN&jk@M~j0qADAQB>Sot)lx$HTI3T@n=?n%f2} z$qN*iS-<6fMO-pv{;9YtH68E28hY^o9A*=8bJWX%$TbDVQS1HV1PO`BV`V*^U*&-p zr91T$fYfo>7z<~{Kx=iGPEYN~ROkM0%pQ~Tz{7|IYRcWQ z7woteH1-1sRq0&?Fk%V-0j186=dSypL%#(d+1X6ll9H0?-hx!ojqID3{3twxaNI-A zS?Xx@-!>aV0iFDL~#^vU8&-11r*)*+9g%|`K zuY_Ap<;(4Y8_`%SC4g>Iqi$=np(GIEV{yWnhs$=-e)E7%9278zDxvD`(EsfC>5hCO zSxj4Rf$;$kN=U$a){%Xj4&U=`ttT>W--eWE&oivGgN>V+&dlk8|6U%CvND5ak854` z+@8_ztZ;So{0#8iz!jJy_!@RWK`lfZEd$kCpAY&@M~d9(?WORljv&u1LyQu1bfC+A z6#of;?^AbttmL|GknqxTKyxW?-=3!zw7cD{=b8;X*FegH$&Pg83K9_5>1Zya!(11*m%N7S0qq4^G?l2gA@ zr#6UD9Y*YzjAA&DK-svsean$oR7AEWr zSb|MI&|P?Cnmjox=M8BXWPQ-vWVtI;=8A@QR*P|?Gr&A6&UCHDePo1%&<9M8&>`ZT z5zxE;7issMd6-Mufvib}WtGD&hgWS$A~Zl~M91}^9}J-$)jzzG0?9EMcx~vh9B&^C zf!R>rVD!KyK|XG9Ld$Kr-Z9?mb>k<81O~wrK!Vdb@G=pd51p|MXh@62hqlwAxhvNt z*wwOHfM){QSIr2I$V96rkXkkDJ;pX`PBhx3iKm;H?8joG-wD39;Q0 z2M|n~#ddX&xCq>b0!co{T2|Ye40>lYqyv~*=`+tU1=I);-31WKT@Zqe)T3JO{4#73 zER{}$+OLV`ZqDL?xATK#jkJkjoZ4nE+J(El0?`v-M>-%oxJ#&$xw&`Ad_QID!ncKD zv2TIJRKoIF4qO-c_2p`z05^b$zr41&Bp^{?@R)EJgz*3Y9{~XY#By$~&Sk!N^F~(m zHlRk(-rt#ZvL&acvO{!0W0bjl`?0y>LdBZd_?HMaZfn~Qf+KQ{ zA(??f-o@>?A?psbAWnL0d9Gjm+Q4J)W)iLZ2Yx^rBOVCah}l>}C^iY088sxF+E&Wg~)pS z?P+zF?@wu#0dq`~h^<#MGp0e}bvBuX64ZR81;Eflo{Cb0Vc6J4{qmR59blnnV7lst zD`q2$#3oeb!S!SqFXF~g1p$3lHWWa0tUNgf) zH3AT@3yEkj&mIOp%6L;uCNPObLkNt&G%lb#^Mu0FZzC!al$`*NtH6t#9sLU8(1+1_Da*&yTQ# zCfCd5c>xXoL0S{751KF6o{fhXPs73#RwGwR^u9 z30Y{SKK8$vAzs}?=>z3yEFJHfQ z0vpWX`$=(b6P?2U-l4szT8SU*7ob(t!b4cgAITG65PiK#O#s z?*I>(4VLmDnb|}GlpLy4r>0yO85r~|${;pO;YzrnOnCdC%S^V*RB|}4+xjBgTsXHr z4rEuDaMf`3G>oiJ^1%NPB)YYDb8n%zQg6}1Hx)U_Ul1WVt{v*2RulyIC%u5e^ohd@3;j?UMzG#>_4s?hyO~S zC7~cWi-C*ax&aGV_8%f=$SN-{XT#r#;+KQO%*>A^c(vqn#n^la|JC%_FvG34mWvi? 
zs&{L>N=ew0vQ*BcT7=;Am@=jaB*AkQ3WAVmOi&dSjgV~8v=`+XU`G5$ynDI768gK{Z3G6T|J{9MqJRs!2*~a?83}{*HWji- zQYbfm=Gsy_AN^Ye1ve$~C_r~r1Mf(v15&|2OW2}FqYNmKTEBv-uEw_jU@NcT1 zghRM8VUWVErx&~Q>({om6C`^Tn1NI^G}6Gt_Viio!sUX08FN(oz#N5vn}8}TCZY-8 zNGXJr`?Zm#|_^qHVMnS-~sbSz98QR{$3Hrek0L_JWmuD6uQQbR|CEJUwt{- zz?0flh}w)kM<%f3q@?u=x8QmgP&4i4J&xpHU~P~U3P@B=-BJ~7NMvLNpg9Z{_|hck zU5MtT6SPZ9QHaCc@IJJ`jy`{4f|ZU>lb^ntbfm(1U!^~JmMA_?LPb=Xn^qD83>yIbhT zc?ni$+`iLUGP-b3Fu=^-q)-6$hw0l7AYR$F9_=+0mTc5ejHp4YH(H#9d{#rk zfTVzTRP8O=^U<*HIVlmt-oKg}=b;6I#a-|aFGCSkp(GU8*Ny)!$=~z7zB+gF9h4ll znQK-wh+`;K}Mk@W!677I%)z~-eoeux7=gsImHFLZy)v{8kM_Z8H>UO zsFI$-?H{q+zjCY%0`~fqfFlJpz)7&e6p9K9m-nrr(PgN@RkuPNDtYNjmo~wYv%dHp z=Kw|2p<;fZq(Dhod8JjFG2Lpo5^|!Ux!KV|?D4lh&ybPq`DFipRT}tn%qBq4DHnH` zrrm*;gMr~S(|`Bbpo03X3@!=E4?m{ut)xFv+tL0Z(jPD`pGnJEOxvIP4)8k6YJfd8 z9kgnc*$9*Eb&dT$IX?Q!xl3E$H{kTdJEUbwbYE%!z*n znkV(Sz26(UF?t(ujcEGV2mZtM(r(j`?{>Oq{$1q!ud(Hj9))~4QsDgLeT3;=*&l5X2kd6lzD}T) zo8XW@b!PCG{vn`e_zwF46Rj5`s(li5h*D=8)J3xLueY50Ablgo07i;u^`@sZ!#v|8 zHNy805y6*DG(fQXJYwx}-SgbdA@%qlh2M$#9K;y?Rh!M|%LEUP$%j3hWLvq$<|OOm zF;m?KS}%gffBkg}Sscq87gl>#fw(Fik2JUI)^*%q7e+-+Dirz8tqym=?8ax*a1zEi z1*EcpnO3C89l{lA-E>Y1$@hgqPK)s#KmGMDUbygy z@g96C>g_EjKaNHlSF1an9KXbNe~GPpx!Rm&)EKsN1Y#||X=9A*F?fNip#lHRhr)!0*ME;E;4A@u1UrD0W>I|BGjc|Gn}bB+7e?{x z?8B@q*$N))6kK36fkvQs|NdLhQ;_mVu$myqmjhe{(R9r4Kkb*eB6p}NFuKeLu)#V@ zPmpA5^Ih9&5_PD)GY{jwLv#wYVeXv^90<|!QvY^f?M9k)bi2=b{e~=i?f!#Nl#nA1 ziqHZ2XBNmq{AVK=9Lfddas#Xz-pDZ0;FtG|Tt&7!ip`bs?FMng{s=exDM%e6q5(qP zYo@tD+shwbJFRY{pFq@< zOUu5Fw2!6>=AY-g>kp34cRX8q z2mSIz;L|tinw!<3ZcUFCH0@fC7AMw@(8^{e%oih}*z&D#zc< zJ>E!bxLc_WZRjEh;1}80WL{Im7-`D6^>x3I2@RP8ncX@jl+B#|qAQ`)QqM9={63k& zq;VX4H4#XwKoScSqEF9`+YX+6oCkDbhV!p+P-7i9Y34g_ZQllo21G6$u?6Je?EhZ} zRcu27Yr%9gu(PWGM6QWR8gNMST`$d(Jw(930)y={e>}?F;r8;-aU^tn zf!Wd`T0WJh<_(RWQ}r==!FMuc$4~RW0h|A(P`Ok#z>aao-F4q?WbeEJ3IGp0!xf-o zQo!vz-t4(k5O@C30Jua${5A4*0WcnUynB)n*@nH-Wmr)+ng|X}-TWr*d;TlSulh zY@y4F8E^y0DRhLKyc;yBkcfy=leu(Hk*WsA6P}PIww6q#L;>)dD4PG@GGb;?t&e%i zyy{vE@4I1SjQCe8?-p=Z2jr{Uu5b%x9KRoe%F?dv~Bz?Ypw!r&whtXpP&1$gaXjqNKGAUr@`6}$N;1T59a7-sU< zbBa#O39X+wz?*{;g^hQ4OGK04&{J{ceEgO8S@AHZ(p=7|3l}b=1~qQ@Q6Tkz+N$|> z^gl*BFaQAm)f#`pyFB)SBLzPtYI_hvh`Lbg?SdNwSbGVg9-2IyQBVckzz1hAh;H>= z|L;Z(kPUVtuY5`SK?8fz4(`5%&8HqbxX=&lmN{O<=1-)5Uvn))@}wwLiXQy- zkzi}LoS6VNpu~1U%``4JI1vy8NRlI1(JBDeVq4$I(fyzS7dv5_M3PD21f<3a&>u74 znNtLAIrC9aK!6H(R+%pSZ4~tM?M?gJDUVh&&mlxE9kT(v)9sPQ2$l>uAyWuWW|cVy z9M!J>zm0`VF$$3|jAm9)zkj%unF34s4Hp3`4O~W97+8AV5?Dlpe>!oxLa1)QeHIV7 zWFiif&4T=Ynny%pE*w@nu;#D7B>{-e1vn_6@^met0GZTJ3CFGc&GsSo>5uO0TgIZfue2py8eGyFN~6!S{OL} z;XKgYWX|q@U7dyn>Zv8%so_R%`R)Qz2#dk6#l=OlARf>t>4lx{LhA&8rVbvLt{nXw zAXd}`f@>AgzV#=`X@;g~>hNXP?VrOm*WVY;-m6u?t+j8Syp(O_w3rvs=p zDFD9FpK(9dSkk+H1=a$QYyy1Yc+pGq&z{K1`wQ$2&Ag^AIE033cGl|e^0pc>f^yRW6g zyzqo9RjP(IM#Qyi|!3xKfW*_aDJOa<;nPHcCh8L4!zd()sPt3xkD?n0<8 zsOTbn8fg?xfqZcPK!SmZ0m;6&@+BFhUn5s0s4dp@6I)%{R_W?FdS<=($!`-Ap+j;T z!MQ}xqLYC!?6QbM|C}4(zV5P|hAb;E2u}hd-3jPjw;y=L85e_DSXH=X_<*ttwE!Xg&u%@$+0kh5I$^+JZ4?$Z&v1-*daL-x#=WRnTT}Yw!=$Bl8OikWls1 z*LMcFL76~Alpq&beuV=AB)pgjjn4(x)j`n(P$1jT`A{Hdb*(>BLHTWK2xS-9U3_wA z+=gNG5eaPi$|&$3XfkAQTf71KGn_;aZ3DDx6wpfVIFJCVG=+2^aBOv^E3q8S6UV3d92rd6x;_2B(dt;6OwN^7o)vh`z1}rduU21wt=n#h?SI z(4OCd0W}Rqu!g24HPG_Vz9Rw~c3>Pu3%py{L#*;TLgh=r1i=d#{C|v$*ux=VHD*G%rWERzF46qXttMX@ZK;~#Vy2?RMEN#E!sFL>NK*vN?X-Oa3( zJFw{2KE(?P##QB%J3NTt2b=@wh8BZME!1Cx_I$cA{|gYHIy3UI!lvLDHv*U5H?8g> z3+)q(j8>3Fx-(#~w_u6@$7OAyCAb2lu)QGs)UKnt#srfKZSe{?p{@b@Seaz&2DLhX zNm)thM_wRP!Q<}|#r8*OxOi!HmF>Py*g!^>6oT>hD&WbK{QNe6ML^@j8^I{19z`y? 
z{^9l&@TWWWJ3_jpfZ?*UMrF{^*80rwwnrGZz9mxxkUAyVRXdwAgOzch8q#9M!9Qtj zUJM6>PtehsiQ;Fk7O%K(E?aMJaYy%qIUzIflt~@HsX!*fOCNnSzPf2~NYY}Egx->R z56Z>LC;-6D3qitfVYGnbZFlb7ySF)K<%!sQ=fMS_lsi_s^>bkBhYC?sD&1b~Ux9cn zEvx|8gjA_+JWPB~eSDxS+_0Qsf}<_DHE*m+#;A+epf!LkK`{`IQ38i1GMx{)E>vih zIb_2fahr-M%$NU?FJ42|6Y%eZ%A}|SsBC>nT&16kxW>v zOhCM<_wT^=!>NQ&TKHZz3{tGX<+*}HXuy6MU?EXUwcb~aeAP@E;vyIY1$8*}#NfGQ zT>%G8kb4xJY|78Sl7n{#wHjRNa5k!J-P)Om=-__z_X2>!vi0Xk?I2QR%POq;-lYRC zIUZ)G>!jOmE#f|uJj)uRFbcPm3X%-+?UX_;^4W~uhAC60ZD$yKgUWMw=v&SM6G&@< z@6yGzKOKXNQ9(~O+~;vE+?9;x{uVmY`PX|m`>IaqHWsQr4aS3ZbcRV&xC;!?1HYVS^c8Ja&)MuniMV|e|J!=RcA&hu5#0?_{jHsSAfJ4fv7A%Fw?AJ z8-C!**1<5eL1yZTN+lJH}HnxV&6T!gMLPVML<{O!{CHcWI zH01q2tKBTJ77c?@0hCg+78C*gm68VDQxBG`d@3mycxH|;YePjsCy|jhekY@LEw>e zY!-e1rq4mgL*cIyKs`DXo)7BkBpfvC1lL9(gewX|tx~&ml)}KSz=CRUmoIw3;XehN zr-yRjZ;F7d`pUf9x}*zXY>hv_X0TtRfx#qgbz#`*5ZM_MIKf~Eh5joTF1~_I=?-)c z9Sgm!-MPWM7t`Xj&%RUHGf`7Zydq^etKpYi*vF>kvQI1Z4|DdsC9# zBiG5cJ>t%i5?zmx50JWGR!CPvo6*S@T<HPkfm z>sKUv#JaDV^0kVn!YKu%izj1y{GQj%w3CIwm(*Ru6YV>Ojl6ff_!#g^L>Ni=TQ~^= Gum1z_F=96W literal 0 HcmV?d00001 diff --git a/v0.20.3/img/latin_hypercube_tuning_plot.png b/v0.20.3/img/latin_hypercube_tuning_plot.png new file mode 100644 index 0000000000000000000000000000000000000000..006010ebcb61c1dc54fcbebff57f4b43a172b4c7 GIT binary patch literal 87082 zcmcHhbyQVt7d{GOCnBgQD4>9VC?P40f`ou{cek`Omw0)j|ONs7`EDk3GNbc#wy zcgJ^apI?0EjPu7iXN*pY)?Rz9`=0ZvxdIe!N)jKWJV-=DL@X^Ou0%w%i;swC z=hXh4_{mN|d0TwjYa}BnPP9$T=!Qaa}z9j{jMz7bPF& zeKbf0tHfWL^1T1NxNYwv&~#*3kJfMf+qZeaoqmS<@$N(_&!0^bz7x#^H)rg?_cYc8 zE5g6i50z2k+nup%IeYOvzU2Sruer;2*~>GB@7=dg^7?fLy_bfk?uveN*Q`oYIn|bs zWEWUm>@FqcpPkJ*5{|d)musF`+SSm|ke{DVeCW{cQb$eAmnkU>Bc>-lvddn-{?N~F z@4!`me|oY5q;KB55g*Xk)634zZgl@p;5bxwUgm|Hr>B!PIT6v%GL>J;7e~ZtL*nA% zHe7Dqn(F)DdY&vGH8mCQ=v_yC@?=I$_z6G5FGjgfpB^C`llkJsfHcb=DPH@|(K27S z(9+&MT&jMEn0Tn*ED@0?75B_Y+wJN8zIXXS!NE=OSMO^TSe5x~dXyg{T&B0p&ph)z zm#4fhyE|IevvP^tE3Z%;9Od*}nmBZlbtdKz;hJS@EL)LK3r33g`ru=6e7CR}O z!J$m@OpeiNNFr{Ul!8aZ{wL8uFN4O_B?u*N#5#2 z2BpyUCS_%J;yrlb$DQNFdm0-X-QC@bi;KU0{aUyG%Q;EOYB9;D-?Ib493xDWtR^qW z22VBn(*DmwFS>B6t9Q))t`0uUJJptQz{j0S=xm`r+FPIbM$V;@ShKrVj4Zzb^9D` zog1x2|KBeG$Ec~Py(D+x7-`4a%c4q3O0o!FlRb9a*6mLMrJ{>{x65mKC1~wh{cGL{ zFFsG=rsECO!`q}Rlb2V22BUp8x*O~}nZS|LZFmfThf!n7Nbs0H1 z;v+|5d2GAa7RTG#+J^22(}+y<6i$({S)_wN14iHV63yLPQ#n30#a zQ0&nWEiJ9`@^Yt<#)uDgH>2TRn13oSr<8!f$XRN18d@ zQTr%%%~R}}6DLma^Ye3`=H~PE-hBM{U_slHCr=(fe*7mV@bTmR{zIR7dpF)U3DeOE z2z-!x#k0D$=I7^EzRa8Z;)R-;+6&RMM+3xd=*o6gvg0sk$W7n0Phg)moIX#7TzQ4Z`)6vo4!z?T; z#K)hdp`md$z~P(Sm^erletx%tYm8C-?;k`hr~_()KlM7uC1r1 z=jiB&6?mPt<-K_msv*mzOHsUb*-2gpYZ@ACf21_@{5U?AQ&d#s!LYmHlpkqem7CtA z?fLWP>)iAjdU`yEzrECuq97vL^3Kf3pc3+MUYh7yU0ub_!J?_F*-dyi$IKCO4NXl| z6%~U6gpzQ_cgMJ1mg{ss4-b#X_QrdT?}S&0%4JU99UdB59PeObVj5kU{T&+{o05{! 
zW`}BUm_|7Iha2H#L6qZqEOd0nm43wH$FWCANfqZ=aIOC9SZ9p-GEfe4WB3UTm*}zK zW@$HWpn&U?j+$D^6E4CJ6^`NEi;AuY3+v%-Qdkmx7SDhx6%i4kaS2yeO@h$%h|&UF za?A`3bIlr}3@iO62P#A^T)0r-_xw4TsW{$OnvG_lJ9L49vKpI5Jo!+jEa)R43 z%s?ofL`=;Y`8HjSo4;2UCQPOKmDkWrOm*=fE%wSah&jzw1Yi5sa*v0z-8nz*_NbadG+dD^gJ3f zGqcz$4vFdMH!MF9j(g{WXFTEiN5214H#aIYlQcCob$90@t&LO3NK1Q0Llz>-@LTW< zUM)&jovl8W_|JHYV>kK{N-R^9^vCc8GvDt{*-Wcf{$+H_M?{_C0|0f$x zBZ}L)!14z}vILYCyrERgK8?8dG)NOXk37etBby{zluIVUB=rV zK6+HKeg>8FN2+{>02$#|rHUP%@z^0Z*5ZEtG&po95KCg9%yaqgkJOQ-I2vKEB6JeG zVOZ1&ygEMj&A1DO2=Lu2o)ar*IMy*p84`3n<~1xeH|UkKq`kTk19Gi%;INx z|E;(S=gu`HCMMo^9B8s>7Fu$IG$i|FUPtQ#bB7%LieUl58s+kuSc;^jRaX8nGc%+3 zTEb^a~JyEI&elnjcc0jYpcXF3PvOP4SG%xMW2^ zL6Leh=7yA%y@SJn!-q4|(@SnoMg;~21_T@^Tvhp$ruZ5s+@IMsxV)!*0&jB1H;A^Z zJKz|jL=dH_rY6uIpW~mJj~_oW$VGY0H}H!29o8&$yWDX=^&0wSp-q>Pj!tGqM!hT0 zAd!W*MiT*65+6Q%M$pY3a4^sEM|$-m;?uWhjl{}1Bje&|jvZ50Qu^$QmP2^?vQ-3| zzS$`=9LvMQgS*<+*7nEu??s1Ft~ok6Nj^DaX=2ivZ>_#dtdfG$OhrXyhKf*SDEG84 zi=xBJ$;okZaiw_(9ipXWW@C$xqSuj7ruh9eNW^oHj95piz45zw2ha99O*uK+&9z0Z z5$vive!&1`tBdao*%D#|TpK@qy8Bv%nVyAZa^dfQsgI0B^R=-n{u0@y9zLWcF4|(Y zve;ju$yjq93!{eS=Dg0M&4gMnrKi{LI^cbhnp)Vj?s)d^*=Yb1adDoQEy{O)RrJ)N zVq+WC38=(EP-9@1s)pM(xsMPk=>ZG~d?N)(xo|P5}V{+QimJ zCOtz93RWh{F2olvUZf`AE>XIKnkXtjqTJlv!NI{oy8+e<7jpCR&TyFczj}2VE9Yv4 zTGpD^_vNwNhu_l=zPp{ca{69|HiOTsdnkk4%2NW7Af_4DoBUH7dQmO22oN5q2Rrfg z6WMgLfgDf9o6*h;Ueu%q9(;B%uEy4V@xM3P%~N1fSvK zxINLSm3m!LG7flTJf~u7wwk6ZC949bhfXmje0TRXT3T)w&4`cco+;&CtM5?%wtc7x zOGV{>b*kY%?{VF+m$1f%9swH7&d%mHhDgz)iya{*mXne)P>nW>jM6r^Agit(;h&cH zb+T`c_O9qmVaGNOMO2hrvu34JTVMa9rR9xqYez>@ZS5PE#>XcvdMu6&4G(`!SHH-@ za^m1aFdE)ly=mq(ARAFpy!ArG;%1Yw676B6bR|Wfx*Bxp$u7)zQLCTwyTHOSH2%Z- zO=c#2Vv5|WIjRT#QkTe`+EO~`!*3L`fY~ZYqUPXUcjV~%&i;BR%aB3s?CczU*;t}4 z@YLzkCr_SKry(aNZ|^>QmXB{WLf?naYh?y-R!K<-l!98&?dPvwv}6JOx|%XF8JU@x zQ_}sDAMMun`V6b{Pt%1IB;?UI)s%MScy4LGuQxd_B%}*=_3>l8aLAuY|IZXf$x3CE zq?Xp!PW>5W%aTX1d89!JhMVFltlF4?oJ&guFJ8=;n3(wfT?zY(n3!12MR;xWHGYCq zuD84Y^~)EpjoE5$c_t>Nx9{Ge<&l#P4GoofELzotUBGKP@Rw%`K6@-r#hlu<87TLb zmX-#9xqJ66cE!QgE^N;yIp)J7!$%cISan^eyDjbPnp`vpH&^4-3w+K8*9PZ?>M8s# zb8>!g`0b~{+*j;m{@$#CTEypDK~!Pk@)HWqy88Oc%1WFa3N?jaF(?D-s8tLYoDU;o zYDw$2+Si}@0h{(W~FXCG(PZJUheSFHjdMhh`15evzXliR?f0W0^#}5-X3wy8W6xk=V zzl;L|!mqp9)u}EtDc6>tT-_a#eSzCrCiJs^$R;+@zZy;y{laP{vO`D4=%}i%h`LnZe>*~aH=8N=l-n5C4PV`xyhxg$F&D_fLqANMG%*bsL{x-@ZqM9f}|92S1}BFv`b%Q)DLm@mVDU@BE53EZ~;cl1(o; zABFw{#%HVLawNnH3JO9(LUdFKg^=i1x_YP-y<{+TK5oJG8^Vti%+Gw=m6e$Zz9=gr z)AP>skn1PH&*S^&MgPe?SS@|I#*)}D#fJzDiRetjE`q2+Wct(Q7U9&0?4K^WJ6`h+TaAC!dlOwGO$mmfcV zcy4X1qJzbPwxje?Qc~8*WBrcs6F#Eo>&g=K`r2B}cP6!QzFU7$YiJxC9Vxji6huWu zEi6XSl8bMmjDZ2>#%kLS)tvwo9+SxGHjneZy|v-~RO#QDh8WF@>X&4}D47?EHl>}}JK7u8K>c-wNOZcRe-0e*n=%qAQ>O(dN2nfIhhb9K5X3>_e3NZt9yvn7(?+p+u z^t2-V!}tT21Sz~8+LXT6?4CkJ%2HfS9|7Lc+1D1t#0^*P@69GYVA)^FH;{lAizZ*_ z%%}|y3jYZ@&%M1HDnF%F1o3xA;dz1&Q%3#?wV_^XR8%a&<4u8%_5WXC)+j zawX6@1>H*?0PXrDB*Bm(>PY|y1YE`zHddWh4W*=N8eZ{cbbNlOrIxKzdx|Hbu|j&R zTbzFKC)NA@5y(|wa^^V#$9qUG6GJ+Pf}CO7i)m3ma(RP{``qgY}@(v+~vy- z*l2YJx{eAy@u1HizW@o#|BT0>S7{5j=Ap~N!}G!)e1}yeOo|?eavWRy z-kwALX&}ZB1dBWt(;ctz>}JW#(lbeAZ>!$(@?H8t85tRBI^5PfV;SQv`Mte5R8&+# zLPD#9(B$Uk=DfVUGvr5G-za-{@GtK;eQq|TD8sIjIL$g7{G_$fA8yr;h{tU?gZ87Y5?o~({Kq5gwK~P+$ zbB(K&o|EM^5VmKjx^7MeWHXt<|ktPbG-)@1Ob~OVq8wH$e#hNxp;^LEZ4o2YDw(od(#o_{YP-g24O_ z95{fj1|rmr>(*Upmy?~Xw@E&Hm%zF0WkR?G(4(tqe80K>53vBEgt@uv;O9N2;)MNw zN9(QKzwcv8f2f&ArRksl`S%6+VC*Ui2AJjhbY)f5#l;SNZ1H$(PBiFfUOT<4W7&FT zYi;WKsNdiL;Gmw6u5^?-{zqj_29g+3DzBw--SHM|sfHe~%rKRiFucM?Dm*y&;<;0;rKJo`3MrAlB<_GLB z3}lL;0OefBeJ&|DIQaeh`L98zyVQEn@+P|82R(UWV`Bq~mj|q&#n{>1%`YH;Gkg^l 
zb%cta>%xVANl6@)$g2nd6{ij4DC9O15BUaMrY*hg3RlG5Gl*7?ClIYK! zGsOLl^O_5Yin=!r9S~an&b!YRtTMHX3_`+V8uV%LrV6ar*J4jxRIAa4d^>=*#?3X# z?QCyXyUS{|w(uAFA1f;>J_zx_gJVBZNl0r=-u?MwvKo^Q2-g@VA|fQz2P}$<@afa1 zgS&6B^WUDRVdwC0i{=lahbBN?wKX`^67pYZy#)JhrSW5KZ*MQud#2y+`M-V{`NXM* z)|!;9dpQcx-gBUlC^1VM4ZU|SqHwG9@6=SIF>|x8Z zaPHH|HPp(7D<3?32(_%~>sK=qO@#7r`_+eSHyn`ag43 zA~&~v##(ESVKEK;Q_q-4fv#CuZ=ry|q2P0!>Sc`cQWEM%H-PqPA4R++p`sENn4t|> z4@kk(tltLx#EX$o(>YBsvud`HyavOFn#68|ufa)A4r!sh2zxz*J+={r6Sc!qY`1V+FajLaL zj-y4k@5CS#J(={48wY!0KH*${4kXnGHsh)VQ3>3xdVB&33Is8T_DYJ1Ez=d-{Rv|G z8t1RdK_0U1>Xyi}J+4cfTyyMRbBJu+=C*9y*Mg@i%({6NA=$CzXto7U8zGa=&&`FA zp2E?`q(es9AjA2t4n;s3KoN$H0A}-{sK~%+-1ly5Z2w|8y*H|QoGcTe6mj`av-T3l?ddH!IHo}3KfMlsoDrolRxP)4ad3?Fec*kh7|)6} z~iBo=>AAEMXP)Z+jt(7#{OI|?}850(^&>~6W5JM2e$C9(l zw6O1NY$iT5wRCh;v#tB?sAN}qqe5_0IQEni*f6_D@Px+(w@Jr+e`LuWuyCFI&o<|B$R5o$WyC7g-I#I)un#R-7&y4Vwr6YdLwkF>j`%%a$asGQ?3?S24g__{t~J!%nqG(An-Rf~O_!i> z<(7@XXu;$YA5OXXfQ#tK0+i^k<*%%` z-4&I}>g_*sXt%6PVbO|PfeY@>()NiqT-r^cA$St5k zS$X*Z&>=8l80SGj4Jdozq(`g(6^JWqm=*_L8T*kY3E5bir)uP@Woe=E2=Mb8y@8@Z z6rNn8RV1DSVHOuQ{G$4iQ|xDjgf;;bdcWSQXoG?_{vu1fM?&$-&BwxWJrbO?uBmb{ zpIuq;w+iIfJ@T=S9zJ+*g`eNU&CSHj>{fT)1$Opl^CmMqaQz14W9@AWZ+ND(q>Co!@2m!WitT-4O8~gfh>-%oDRaISgAN*;H9veb31;5qY z%nWQ=QCs`fG=ap#i^Hu@QdWjcUS%|UXUDFm)IwvElUXS#E3x|&Xm;NH2x!vVdnQPW zg)9JlQJr8BWmU)`i{=q{Q>m4L~>S&bKd`ndm81h z0S8(Sjl0LgcSE=J^%P`a7zUow5#MRR!YveI^P$udE_=bDke>iorG6-BKZ#JT`S{#F zKR93)OapNo9G~#=U_&OBv7E{U3tGPl0;!4~v|%*|hD91fy^To2vhSU#(XY7UOM6@n z#wT9lFOPgci-lh9L^uOL@JV>k*xz+uOlv2C8}!|*mR*R|H;|h+_WwmA055oz2@BLcy|uG zkpBF692`hzJ|?-`kN9)}+@s?8;giE7q#^3T@Vsr?zJLEka2rIub4S~F?D}<7R2Gn4 zz+%)dc1-rrp(g`Nj(qj%;yG%j>#C}4y~R$vw*8mhq|?$?e7Y}IXtUb9y%$w)V|661 z`s-I9HBk0BD%7Amr@<~Eimz^?g27nQSvP$Jc(2`_q{|ZH2mwK z2$0%9%h+|M;{{f22d&e0pzD10xP|(w2c+-M1lI+$nvt;@NVC5FkUEF(@xzri{bVgnxuq@SJ3rkBRAO7^%RSdD7Vv<1% zb1>YIzqDaplX8p{TEk&Nr@NC5X$Euv+6DGiw&wd3Na8@IP`F8miDNj;Z$jt$r@M!J zMDOIZ)fI9WtE})-qWQi(%!6>ZBvO;U^>I!Y1h=Rn`A4sR5ogQDMq8aJ%NffowNn8tXe*L;R^p_f9FNw z_Dn54mob%cF=zAFU-cResM{x3dqG?J3bm!v~|% zD%?U?$pcp#tKNeg$L-iT z$G%@a*TZUJBA3WO0AaPp>A*3Ok&*A-aZqy#dM?R!s)T0pm-3OP*J>3B2L+B?CODtt zdXO?LP)m62`fYIC>tC?ML`Odf3WD@7IW|^e@M$Nk=UWt~PUX2+FqRa*x-lQ%pQL(E zMugWgeQIp1t|086f;&n$Vcy!ZxOhau7v7mwBL#yRS&3{+Vj?79{I|&Fg0io#FE@AN z?R)!@yU9zEUE_okr>T&3o}8B+uvX7Wd)Y&FdEb#Uoy$F2 zhkT8km!k6>?d4X=i5sc9EC zWAlP``})=DeUa@d`ZFMp%B3Lpk^S%h#g!MYUiE-oU=cK41OOQy8*|iC+i|$kzsd-b z$-|3Zs;aVdKP*}5MCXUMjl}u(r^?e5oVX??mMZs3;+U&q#AR`D->KxjP_%7+!cD&n z=h0%JsnxpfRf6uZJmZ@uPLeYf1YDDr4uIVF`80cPnWt;-g8+wId4DNwN!O}5Q+@Bn z6QutOhHK{%VE94hnH#4n*$EqBX+rb%?Sh97uY(gqoG6ytd&AVut^$N`O{ECFr`CMI zbJlI`sf8T1huR5E5k>r}M3L?L90d zEj`F|!ixyVKgX;yS|XcH`7}pFWvv?)5iX7KkjhV{o=FgMxiAeNZfwOK) zrLL}yRVTq59)1GEc-kip!MKZb?Ej<=Sc;Gjy=C43sf6DP+-1 z*Gc2)(E?0OIL3IMwkI`@nT%0NacH7AX`XNgC64JSOse!v3Qi62W}_?(`zvf%o!v)l z*Cm3zPz9UXe-sfd7)NOAFC!w<@prp+?E-Rfoc(p3g@vUcnQ&!}9zCmH!wf08BfFb~ z8HvU{yLW?tj*X7W#_)49Gb6PWfA}Pu-f;@9*Oy<_Ha~xo$z&fWH825BA@eak0|ObU z=7$aL*3ZdFX)~a|(SAVuD+(8i$g}hF7ncWo^_}2Cj6ilmYRFiX@!YwDH5mv(=z3^0 zTb6uVP%>}-{XS$j;Gu14`9^{1i}ST1_rI<3wZ5}>i_u-mg3x_7RVqgfkjLrGcd&T?dq@O}6J4RUIWG{~>3dmQXom{A-lI2( zjrEGxuH50bgl--9byQ>|4E(#3rm?OU2}+QbH~V2_Ev>rgpwHMin?6f|ri{dlEPVUpi@|}0%h1c(r zwSdVXW~r;90%1E9u0X9!Zuc`d(09Zx`Ow{j#}8jV25xzKW)~DGm@@bRx|w)BUA`46CUkPH5{Gf-7waMHKCOS{>I4K;wr+VXMB9_To z7~_`o-h1TlTxLaaCZXY1f$bO)VrXq$KW#gXdl>INe~N<6;=78PW`AWy$HVy%s9o+) zAo&JSDf5yAB>ktPhS8Xrm_RN%rU(Ou>f%Ng;uyPj-3LxHg-ko%Zi`|%Hw*q1$LHi{ z7&$s>f!~5__ixh#h|f($K`2CaXi<7m9`0$FqS_9QmzhOHk%WNczpt5?UqSco?R+Vl z(%B(#{rZ9Hv}0V)eLUoP>MVb;-W5fOzUF5L-0g~Xb2Ahj5(%J@yU2n8PCY%tr{vRF 
z>h8qG#s-lVpr9>P+1bIN>#boWAVPFZjINFjAsqUf;FIRJgKS3Yo}y-8Nb&p1pqwHLaBrwRh>iE;NkWTAXo~QG2d$=u^QS$RHfZXk5P4RRbSc}% z>>K&*3-8LcxEsxTb}*UMK2P4MeERQl zp+z&v@s&Uuqs}Z5JG~J&{?jZYZaew$PFbqu)C$DWpjNw$CDHZuzqjh;!;ntK0Z-? ziNwhHsB3>wHN&1mY>B@Geg=SfGgff5Zv^zUU4Y~cZIrggpP}SUm*|L_hkxL@t6hh9 zKb?9LD)Sj3pG0=vMqX8HhYP~pzKi%)s39j-JnH_reC2g=>EXU|6VLv6Y8 z>N_DJq`?#j8qm!DsZF>)@FQe8K&qqJVmikYHTT~d{utoVz+#~?H$UIk*SC6e_A7#O zNZ!=AKIVlpQfOGY*M~Fh%^NB~dn8ZyA@4KbyY200^cm=EH3;wjF~HAVSB@bRbRecD z3N?c);KBU6$1nzKo0`-^*b!^lc~|uMXHXTuRRmG(wO_O`q~ar=A!9F{+Ri#m7GMIw z1};nQ3)536X=zgB+PkJ9mLbpnxOW#E97bB7WxsSaC8flKgoK&fiQ7piGx*Pq{*gV1 z?!4KtWQD^K5$}yvCpb+Cx}$A`f2*AI;3j-;d#Fc{2Sr^H{*S*_uhcmdq0RAf0tgx5 z5CkD1g21ZTdH2q=!f^@;cfhqfcZhrn%MjukfM4b1Rfq zWM!YcfA0jHCN`EjI3z{(<*(7vmH!+Zcw$jGr(pY663RMY(7}_GFj5F|)O&MzTHI(S zS6BVF20Q+ltz(Be+WO_i#9o)2e3PCIIYe1qedKZz1e{;Le?vz9V!XE)>iNFCdy!>! z2CYN-LDB(Z>5eN;!+=fW}cVwhmJrXh%{G-R>iQp-0aJd+CvOiAU zb92oFw;!UcorQ%PKpCCnQ|Iaam>hNlLGaP2_?;~ z_;JW8ZD%YyvST3f=)3*kwe@ZF;Kb$%G|KzV*SB}IV2LgH$5HKjPhwBVr zJ1T`&s=^Dj2$<@#F*_-b;FchyG#9foe$e7hnU;^w7K{RKFRv>(I?$Msq>nY$GfU8h zpqhTskCSDfqfJmapWVXzx5p7T9$-MP%mXu0uF%W$Dtx>f1~PAK@+h`9WdKb-0p+HAD)gx#-zv2-Lhc_;e4X@#5d#M3m=ZeY*x=Falm8mkB;}ircDn zAl3;fB0y0f(>;6l5`vG@Sr9j2X+uqMj|1A7`U-ACy2fBycq=d=Th`uk9OYi{W9;8kCH{!0vBZa?*sgyEWDJT zAMc8D`EG3xd^#%r1^vK%+bMXT6UA5dHNR~@%&Tv0@sIEm56?dgTYTFeroR|D*xR#1 zHTpUI<_o99^bD3ZocdaQc$gBBlJl?jg(Vafb=KF*NJ^SRG4V1V)R2|^hV8U^bH_{s z2>9Y@9);=c=0D!8t_+-$M_uCHdMr||Rvspbzo+tRaKW{vu8squb_EXy2Z3SMw?!ag z02iXWr)L|(A7}hPx&Al!hrV&uzX*FC7Xvr=7!etv}QkLcQa!AS&65cTa=Cq*^fKHLEghLpNTj`v=x%RYz6f%IXQW* z1Ho93beF#efM`*uvXP-#eP5E!St!bMoF}q)4+bwF$N*BnJt0gw`ABK*z;dK;I&}PL zYvT1l6ep*sJ(N++x*$+meC zyo2)$40Za93=B|4(^>V;A3unt6n5&+l{TziD4dvsS;mDG4vIP$CFt%5)nU5t!^q&^ zCs%TX>)41BBYhQlnX*ZC41LQzbKx0VS6nfClwG&}Ww_y$>ty?8()vj74tX;XwLW5AvI4KgJD>~Rg-nSpQWt&^dQeJ20x2csn*6xO$48C-4Hf+s z*t3j#q@xS3ZgTAR?|HabZ8{mBo0>?LB~J7C8A=2lxYSW-M83s(`CUrs!dy`K>#w_0 z=+8U)*cPP>EN!lJ{XEyAJ+J}N@cxHyJ3Rh6bu(0FW z+C8wWnDe=J-|_fWtgek@DM+$4G0=uulN4Y<53qxtUaUAR$kTeqE*@DZ!U#a)mcO9k zcd_R`<3sTEH&U#g4P%93%BEcT&SQ^x_Uw{&eKEQGElPC;L?3_|03(>z?-RvusmxN3 z_8^$I^0YbLYjd#!;t;2TUB)?%^ylGUgyc@~J8PKy5j)Y9cW~=KB|H3MWRe|mw#ck> zLFCY4#Eixwg!{Hf_+dt@1Tk|>f4`o(yuO$Bqi?m3T3e-+&OPZ&FgEBcu+@e)a_8Wu z>0t>;=5XpO_M+M*Z+epUkaClb9aWV5_PeuF0|rs~4{lkUAHV044K5uDT1-qI(BAP6 zkBf0RAg5o_D_!Z%x5j@#ip}EQTmnK?hv$dL`-Z}G9?OTmi76>d^}})yOir*^u;n|F znF>B1O0ipAR2RfFFE@9m9t-Fc0*=5u(l>8{TOodh-N?gJ06Py-o1?3%U0#zp5Mo$( zI7Rw){(<-?g+PDYftHL5mDqCk?%${IgK3J!fZYqNl)%fnguOIXRr3ui{g7JraCb+r zLx7JD*0K$fbNM%&FlBM@;4fBTrVd2xfnI0ln?$Os7(18eHpVja9rVeFtSr;E3$?!^ zc?3#Y>P0c$`RSwUoyw0_Cw2KLNB(>vXkzgHySv98(fBa2vJML{l6=ER1#u8_l6xE7 ziRXf}Rs-ciz6q3>B3moQ;JO4dFC%jiiDg#Sn>TJy@;i_64tP0jZ+WXF>m9pv z z2C80w?ieM&^CBX`+S0O(pYiNj(Jdo_%Oe@Ahf;=>JD}Z#bO`d}uysOChTTZEQy+e; zz@%c8gRy}WfZQj)lKxD8d!!xhkM_`#v4csza-wZ4(c3fQ{v)#ow;Uo@7yGG&N&ub< zW^Hn5j~^UCqr&z?x)(02U`?hV8b63oMt(q+#w8&8C1?#A;qUVDF0ryUHZ?`$u*JQ2 zu{0%@*GizNDn={Hl%Awz7@>zxPDnr`L^5f?Vew4i>G^R58m;;YP>DU z?b6L?-qe72CIy$p&x*GxA?c_~8RoCaDJb}mJAukV7&s44KS#lPOUK406OlFWT4b|1 zI5~l=rza<4UcQ7=ZK0M7*4>Y|8I(}67Q_*oVgxWtqd;5N*eL8g`fMW_CswcTU-fHd z#-5j#_vSY2(2w`Odg^9%SC{qB@C#OUj8tO~33FEx0wO9qe79Cn$%OXMoA{%a*F&dz zd(@W&n;WY=R@mDE+$en?GW19=T9}(x7j(l%y5ctT6T%~SAmWc0u{^2>dECGtzM?{e zbaApro#{CAL2*e*_>RU#X@>s)GYdeWM|5W|5oQZo7`+8DR~=e;-!H-bv_qEDt9sP{ zXyM>)t1((@+Vp2BkNo@+)jhQh41kJu;RIhrM_)RAaB^Y-c+SPy`8X3{lKa7f2SA@% z8XBEFJ@xxg#xak_=8GFQ3Cjwsg`7pp3ptMHO=#tJL`K{CSH{MD9+C8E7TJHomcrQa z>eG2x$1EqFiHcsMC3_Z6+Ar>tgQ}vYqJlW@+qE~_9^X4UwpV@!TR*k1u*kFNdbh@% zQJf4RWsP!u-l}c)&QI@#D&s`tagBxq>i1O+3{>1u(qoloi%3$Jt{W;Gj3<}v!`rWk 
z<}5|%(G-q%=Qs41d9ECp`!ut>jFj8_jPvY7>ukmL<}~E3m77V9AXwS@6?z&PHG~1L z6@@|S(e2;7okA8?$=|MS%zl}hJB1Cna%3m5xPgHI(D&9-)>uF^Skv?!y(}JWZ7|BXd5{Tz5JccE z^wgl~vp7P+XQAfqg#~UX=m49{(7wXMK(5EGq^ZvW@ZeP>}`Z$ zTLisFSWlDClUXV$scmm{M(-x~fDSQ!x|Z z9FRD`NKx~~ihqC+tY&Jyhc0Fb{kW1u*pXAhDlE*N$X1kic>m>)?KV4v~r zStt+?j=fiA41ve-BdD1+wzk$W_UJ=|)ViuFF(p?G;-l>BZ@1#s#9uuBAg|~D~O_Sj|FVo~&{%C^l;Wc7)2l)cOKy#={G?{Vg;K790ev%@%J(Ap5 z5gfd{)5vpUPq|KZtByyb>-l0H5t2TkW6agF5oQs$n>HDThEyekd$$*U_@D>Wtshr? z>-_h7TZWo-?WI>4o|JF8Xby;Qm@Py`6%s}wM6bg#0n~_qZ2N3O&4Lh-$GZvK(jAzaS|gF@;H;uw~NpJBE=#r+vSxzUNVQZNvXt zb~j43Zf($v93vxJdD?Ct&F_*vO!6HC3LHmu{sHS}X%DVQHeU#r-dlfeTMkjL>Xj8!pc)Ya3g zO%G>WUmOQ=pOqiaKzQRVO9uVnN~l82x6fZf&MPggS*I_>LtMs0Rva22(y0fJ1KiSw zcab2k-Z3#t=!ZEu^5Sqb>_j;7!7LFP0t~}Yx!Zr^ci=&ttj`$2 z>m-7^FOjPQ5EqoVZfH;YEbCXYKT%+6#W3+tYdoAJKR@px^P!CGxq8z*;;o44Rc$%{ zUnXOfOuV@Pdo$Dts=Ei2FK+I(j*gu2tX1fITE`CgOUcMsLlno&1;hE#+Uk7!c6Jlv zR%o%4&-(9;*C@&Pb_048i>heM(N`>u5*FNV4ux`9-n^=+l)WH5ax*4Z=sb$yK_hS7 z2CR?I4-Z9q6@WcJi2{Hpq2dpVjC5NVC3Lo9Er2!Tkd9$G!EA($<}Xg`` z8FTb(d3pKnJWjv;2M$~p7eBlIyX(D>=g$%JB%RC+4_AfhRI#^WphLSg_b$ zm@=@cuflhrs>;;h6|Oh|2_GgtA5yz>cQhB<&7+5&|G0Wxdl^j7z39NWZXfUt9Wb`9 zfmq%|p{b&xV)~U`e9?Q4RJBRHS=E|rY5%%3AJGJ&|73{)50;>#t9*Lp9MMMz&ZyKD z!HA9eiyKWV6Cn-oM?vT4?yJpAm9p8lp&!EV1#uV~ABV&V$8j2uK0wX~&r7&*<8i^r^V0XD#;i_foBUJ6XXgFJ^aIX)7+bcmx1VAELewIyq2Y%78X87k>TakLXBUL`5Y=E7 zakh|n_tm3ZM=nQasH&-9a>!|QZU}{bw2yuS5hQpSm|y~bGjw_It)?zrN#WT*>0k26 z3p6ymltEUulkq}On~`)IN6$pke{cDVf|qSrbSIe(QFFa`_H$~R+^{8$f}@nv#7)Rq zywE#Mx4=ddyl`}S>ZX%f**N33OQ>x=i!VrfAM!_y2z z^k)cD1ps{0JjVwT8JFfixYym&A(=FB^-7zapAUWZtP^7@))kL^ATTL1agn!GaP2*L zaJ#JRaaM%i+5Pv<`YWGmY(pRe$;-XuKge*4M@J15RWcJYGX>x7uW7XL8?g5Bz92F-e3TI8eTc0}w*7E-tlob$Ccm(2{dPRu&<>A6jtK ziEZR_d?v$;pzqT0x9)eVFqBFy?BRcQe~6UHEfEEaD{vWM7B9-R5)mQ60+51U3SDAE z#ehx+Nzu(-cRtORd8u{jR&2C!i=L;??Z5Z+!0pHev_RIl44SVcgI71y0CT8sYV)Jb$GI4)BzomF6q?Z z{?^XUQ`=iqzr4>&MYcEbJFoglr3wcv2s;RY1hv5lG_ba{dA+4>)%Q+1!wCOg|zkO~x>%DgOY!u;z z?sH#0eYz?ypW|wK_2$i|c-{%xIv-Wv{_8zzNA}BaOh2m;hqdX#{jCj4|Iuhe8_?X) z_&BDR4ie!RC#Uk{rg`?BVZ2c5JwKe^#WNPGC-L}k;;&y`a84zRq@|5}*X>BCd!cY4 znYaNR!*_j>w_8@A`=&BC6+Ka8Z%A3n{Rh_)l9H1(S00MKrsCywMQ|V20|I(%RMcaM z*O=`E`6<9K`r*Bme@7*lp#JaVl~?apHYge$5VLG*KJ^6tm*j8k&R>T;oVXEQ>#v5( zFmb@o^hH)=iRF8WyX-aU%j;{FLPBA@IrG71eDX1g0Kd=gL9e>9a%e(SywfI^;A4L!`q86(nHkqaE|Gs2a2zd@@u2sZ{6PE9#<@4a>RF!=oW zSKL8{LcP(1vvwK$!ws$`JSG;G=t66;?7=p&((iq^+fvojvyOH0ILUD#@b$5tdBW9) zm-#myTkXpIL7glJN=Zp3KWFhss*)0Za&ayl0LjFvwP_KX7wSLbz_AGql7PSnkMkE* z(wC!?dOl8`yzY36*`?rPl#{e{bVGmsz!*hJ2~UOCL4*e`Sa6k;mPY?b2-30OT7!JB zqV`PwT&PT7xouy`i9l1IJ`$|Lw69T#7RL_~9!8+_{BG7uiT#?I38Njw_xiQ5wRMIg&5av3u3fuEFfHxv4fNwDDlq{!y@jbZVe^sep5ES^ z<*pMlMnCtS+q1*>i(3A|*&U4XxokD07kPNpkfMcm3b*~Y^Cdb(=pGpBLVv~+sMfLt zAR$328Zj_vZsz9Y#lzg>zqdYDR?sSTG<9&eoJxn&u`x2xFr52SyL)FsdATqL$M^2; zlIaTw9vli(^ir1=f55xY(*2hq6{;h|4y0Pn{r_nC4tT2n{r{tcgsdc+B$aF>BQqqM zN~L7W2$7MQnM!sgWN#ufTPR7{31ua#GLsSVf1dB}{ypyFKB{|h&iTB@>-Ai(Kvhp| zx^0W~Fb>M%StU&kk1s&r$rIn6oJ>6JT?HPD;5hl=0HTDnLd3q|cGFBcomk%Y%0YJj zwQIwVA2YST*+qR`TRS}V&#%%_ab*V%+Hcgm!ykIl#l}XSHfbc)3L#VIG;ET8F*!4{ zo4|kcC_ODLv!{iOL|J;@3$aSCbv?*(z_mC0`FToHR#k)7+}%m;&e?1F6`w-}J8rY9 zK4Lp?>gUnmID8T)fNt!SWN%Z0yzRt->mGkDKyv`h*mmzW4Fz7>E0py5ah5zliP!7* zH-qVAo^B5@%%pH}=1Oz;+IkrE9_8m^W@4KB{=K=Y;nWRC6@abcCW8((;gJRJE&POA zwsVsdWr3Wb{&D|jB}V4;Z}UKtkV;qi#%kr$9p?v@^_Q$3k?m6G>~CHe z^lxw10ze6{ENjFG;SnG;ME8@Tl|38J9TkCXp)JP4lc1PlH(YZ{sQ&uQQta!XS(?a< z_imJ#Ax{Qnk0t(nrM#~7I_inVcCSuNxxJ9UQI@HsSIksz;XMVfQfhKKI|;XNf#FM2 z82(T#;e3X!MMq!%_n$xT{I{&JiF1nKB8;4Km7klF_2)SONWoE<7oXbEU^(OQZ{MK9 z5;%T*gFOkhJmBigvqXtCCB6I{{ 
zr*F-W`1uq%xUVgwY-ApPyRs+nc4l0M+i=y8^wFcaFY1W?Aovgp7ICan2BFcRq@h6r zJUKpoOi&P)5n{h&U4JoWDFp;Kx3|-9-wu%W^>1gApCjqA$~lwtYxbAVmO3^hnTQP8 zWnEq1zS2cf^|1EC9g$W$J6wB3UOqV|=ZKI{2U<(`08x5*v#xDyeD3Q5Fg(8Kj3k%KBc_Gfm#i%EznGKRiiEf zw8yYBwg7%?czH(#e&MHL;*L&W68KtrgCPs2y)~8TS{;j!kb{NAp-|-?ZZ+xYfY?J) zUa0YQ18;>J=k6QRGMsH}Y_Q%96>6S8AN%s92r5RhfR{x@*WCu(PWwRCVGV!kmBJli zoG76XY?AP3hv@?}Md=xt9CEH797cwLPA(L+IPwul6@E1=cuQbwiN<*?jJ}2kb(sS} zP&j740u?^+BNv50ZSB=_=N_yW=IX9m2-!{>fu4jxYF* z9s4;m~NhoYmj1BoZ27wt=^ixM7OeHfJX$X-k23@2t^_C{(z;h z(PZW4YiM^MFeOFpFOTE+;AJ95O#U1xG~VaTooM~J*x5C1UVVQHnE(X8x}a;P?y2$e zP&6(%F!Y?eg7xYG^8j2TV8TZoR zUjvoSSpb|HuIpNV!1kDIcXp?z`& zQ@uqS6Y+SaWfh6eOVP>3@`qoKc$HOGUqM^j6=x$3CLdroj#;QD34XYI@22erYy``1 zX$zE(@c!~ex7rQ8sZ33M)2$cmy`?mMAJrwgNeG%6^mpvoL3R5Nu+9I2Qqy$!@y=li z$LB@0*@eu?+`K$97sVaoM}g_5?XKyC#wiS_TVB|7v$T+Lt$!s8X_&o|`kmlc@swl% zmxwU&)&}6-=x5LVqWnWyL_)xY3j+8c56^=8CJ;Z^v&|vQftqoC-T{B;Q!YrkYj|k@ zhP>9gbMoWr^E}^DpmBaCVn#Omn0Qads((aO)y4FJva(P31FU|4Sj<|_q|l+0-W4{L zmb$&ce$R1Fd1>h_;;9%+678W4umZOz_8@fc^9m@VRVqHB(*%ToFa)%Z^`5IQfYA{A z)H7UNTsr&6+2nk@fTQEaz)A(AihXDzcwMU2{kdia4*~-0YAH!YkaGi9aOrAJcD6Mn zBK1bZKuN%2IDo1CfP!!^GaLW)YZEpnk`o6WAIxqD?eN5NlioKJ`o zwV+J2wXjCg?B9Q#cPpR_R) zfNv$9J#puWda-4fIwbTFkny{|ci0m$9FsP*&8MlR zY=AxbA_*{>IHHUh$DxW!n2agl2izT^h8pJ$ktgT`z`1TN` zv3&YWTGw*#?lip|B=JC{>U!pU86ts!kwSdf?OBeMk7M=PwYrg6w&m^3VqZQkipIKj zlA(F34XKv#MF-;+EQL*vUb&&fA+d{v{S^*L&>ftUvbtP9e?&l(TtT2^Vefeuq{#v* z-4{~>iQ3jM6|^zWA@~N;fYa4nDX__EyKcX`j0u)%0u3pP^mAxKz)XOFjfkLCPn!;&&~P3 zCgJL(D-I1gq$J+P-Iw{ZS^r6y1VR&MZ}0_v;MnQ$EXP>z>2y5j%^2?CVoIzME$ zV8Z%8eZp050GlY}u&xULz;|B3h&p;rh;HZI2{@?pom$knu2_N@Xx7WHF=-P3O*qq1 zh7>o5aLUGP1&CaZQ@5JYJ>vSYyBATbR~OUq)Tg{Ae|B-aDCwdU4BZGSVLv4;t&rxA zhihueyRN>aohk#0!E7N*m9vPOR`+ap^JX72vpYng^r7On%9&FcBKw`k z=2!o<7@s=_Bcyw7L4jb62B_tEJHIQ?unuXBdhY?qwC)2fc zb-hGTU|rocfb9ST_b>@0Z>@pevjn7?@aT~T9{0(eg+j(Jqe?YtK7Q1mv5q`%Q01}+ zzrt`y#}oJAP^ppqv}>=c^J3l?+P1TQbEyGplg13}6LKN6Ww;jaSc#dUqptR5nlgWjU;W8Z7B28FuIP7YD?%U@ZW9GX^ zcU5*$U+wK?x}~w1XVt;u?K=ClV#ERTl;gPhv?}zi;q1~Xo-&XDW4+^0CLy`2KrX2I z1a}|8%t4pK41;Sz04YH7AYB_dWQ|JZR^RV=Kp>Fbe6B=7g1+jzOVR&cI16F}RaMDn zpDf+`v!I~vw|$I#X!qpg`}^vtrlwxj;<-2yU%e7J{eoX2gH0g<(ulyIb%fx%DnsZ5co+~Y79`5ck-b>&A1(2JKrtMG7DJ&!cq=x)Ch)?SLjKr8YEwLFJ+xvzB zmKlEAU5BVLDtWFd2Z%|Rzioc1nZ%T|KZffcSS8Mk3Hn+EmGch6DE+%o8%QvQVX?|MJuZIGdZDT_i5Um(n$jiWQoG zT?ABhXYyhjIyz8v{{F4G2w&Zl^oN6V@go8Z=h=U`UY0v{+nq5`cn`;f(ADDa>ksxj z3cFCz?%$tvzVCXg-g^Lmb0hV>o%#>wt`Kl&glXgU1S^;^t35hC-ZS3|RPMQ)Z~L=X zh9N0c!vF=>Jv_7%&rF}~wYJn~PI}4YvOV<)E90d7V6(x)PKs}nlW*JFu3WfK?z8nb zWwjjjB_yXSvWl!%L1pu#Z7>nhd_YZ=U_oqLT$Yow$l5zOuOgbT@LFZDR`?gYZ+k;d zlb0!mOqtxAy1P|Jq~mC5uj{*aE;N|F#NRB>JK!Afo|oN8Kvk`wp#j|u;x;$t8h7Iz zCX@qzbBo!HvPfO@?35l{>hcNNU2 zkb(8`rIWciFA*d3Pc}}iV7jTSOl(e(O<(WB-kL`l{OyMFs|=YZl6%FZEi@y)>Fs-? 
z8W>U{bJY45cXTVL{Mz#l_+GB*!_&2eeB9i)5I^+kNamqz9=#G-E%Dp-l-03O|KT^q z0$kP_(-kz36ZQ4>;_*U0K~75QC@f3_j!KhHUUYVJEUKvZ+}n$D6&YVisEQj-!1;s+ zW_iQCDJwN~2O*iOc~eo#LqS8;$`id@{rcHX_Y>1nI=C5;QXyRBBuylCPItxULFy>$ z{a0K@X7lIgmzC7c-rlkV`Iz`P5*-#N4(M)Qyr7x1(JA!#DX+^P1NMJuR|`7-o0weRtk_J7z@mE28#?M+Om4A3pqo$QJo!(_gNmIj(J+Mvsoj z_<_jmu1$p5gNk`lSLz@(X0=?VN`HB``z9{40lI#*KK#<<8GS~|iP2HQsvR>0bY9=T zBSQz7iYpwOo9iXWvZ=4H$4m}#vjBXAD!XX*_9KI<14_e?rm@8}z;sCUo>x5%Nj3<* z5_q;(cA1%)7GJ;H)B62UDxav5(56lod(=?n8|R;ayBYc!zq~WJd>N7#;*1cuTcN&> zee%RvDwS>ur7*%lycr}2ekigsGAy9}&Pv5|^H-sxnX-Yx72j6cf{PZ7j-1W29catGq8Im+PT(fzesGvoK6jN>u;w6E~x zn5I7|wSvEjl6M+f?5u+3IYE9%=h62&}l+&dO*?Vosgd7s6{-w0=AW%U(r zt-@uBtVl4dF0O4KKPJY;zA;im&(|eVxL?TVI@8ypp?gz%cb>lai;Hvf2DcA7r3~z zR4j8b?RR+KU7#Ewx1i`0LwW?rH&O-kpy&rdYWKak+BV1q#wl4O^myg7ni{YAi3GV+ z46BHVQT{^p511-AtAG=GC@B%zpNUt0EN$~8ZfHK=vx{FL!Hzy`b=qz4*qFfg#alZJ z6=ZT#qAzzVh6>KzKf--GD(7e4mJie@V*;)qoSY%uz;V5@;)?A-PEI~p-9m>;?1+E> z6;;h$MH07FIlR>795odIpvPd{k?gvNiv=aFYFZao7nB$PO5T{B+I&J!JlCYS*igJ5 z5)}N0+8)YoL_z^Y>Usnk9xCM?4*eX1efwgG1l53*w|DKmdlcsyAB2Y+1Ch^m+@>Bn ztQE1iu<)+E-Lm!kv>U2snf)9DuI57m!B0jT9Z3k5mp%f4V4VcAL?2p8G7ysqG`+Ngy&tD2)*MPG~J% zRyz9JESFuic*Nt@EEl7lIzs zb^b3xu(TjkGHJ=(!pua>=OGut&tHeK%K6*y8~ldDd_K)%j!kUz)HQO-hbe>fpl$5z zOhz>P;^NQ6{N&_@2FIf>^T>6D<+Hqr;}uYyfCQ6d-;a}Tl=&L@Q7*3Q*4BnbMh{ie za5|v|L*6v>WddMCjvZrVXScVpVL5Q%tZxWpN4`)4|DQE){xu?mhYwr_rpwLEU3-o; zn3F6ZPW(m`dTh)XA!@W@V?*S00j_?TEYoBXl6&Awb6j^P;kwS#v!v|K*WzPhgrJ>e zos39MLO}Znq{^tQ<|7eW3GEfqgCR|Z0cSg1hPwG?e1*Gfa%B#=DBb+Yn5N1G)27HY zRlqW*{`WT8KY_l(-?uh55grOWfGGQ4TU!Gy3`;Tmt(%)(pjn*^YyF3apXKAe$E{yg z_&en^W5)HOMx!Vz_cum1Br70>LW+yaue?>@BbFvAO}<#z;=o3Nx|ts{Rp=Sj&Zb$&`8c!FHWGHfQ=70>W8ML z68sxvKG;i--=3=CrU0uGeLn9(PG0Wf(t`T^bIclX#rVGU4?@pz1x@-aoGD zzatQ|G`UeJ|2>3CnR9kYnTr;C60(h3w+Ma;KYbr*1DPe$&x8d-)_cLQrp}5fsfL7> zdtVVUz)%fOe1v!I>dL0G=2z=v+t z)D)8mKE#c_m3?qPO!(}oyu}FuA)@+q&rU-D$n0(;JOaUE?MT%@352Z>B<}9r>$4Rj zclM$n2Ly@4Arb;21S(&>3NCs0Mh*X3=H6v`jdb>igXK})@Hw%rg0Qdb7oObS8vOHJ zJL%dPRm@b_O6SCZLaM;h`p48T^sW_a4mCDnemI9I(_Q)L<7^ZJ5E{I+6hybAY-3By z=fT1Ep`Bcg5XZSUJF(xz%;M{}Y53%uhS9GDx$P9zb zV)?ifEce-2SuL+AT1pTV&h&i(4sh;+2bDBFm_mh($}53}64h(S)0}czfOoXCwBQ_A zSjnKwJ*0vi7^s1w+s|%n?HUElUJBE5a)cF{pXng*SMgq*W^|;YIJ0y zEUJD)VY`<@Y8rrZ$o;tYX6o|JI|;OYF)sW0T;4)SvGu*I6JaPWGk15Aql`zj^duqz zK_^9M9fnJYAazg>hzPcajtNj>E-fyO{nsJ6nDAW$?=%SH5qtCqBW0*E53SGBdk?KZ z%A8G#lKGywd(21I-aNRd@n@z3`ChhuD4exdLbhtcR)LZYuv9yN=JM zp9nZYW-?~NmVaXhH80Q`CZ-hx$R@~oJ`M@7LSKo}?}zNU4bRMZniOJ%yL?cIC8zni zu(0k%By`M+-xkDzW}cCn9ZtQhe`TXBmjtle#$SmR&yb22mE~U&w1+_CDsTDDmmjWyw3vM^01i=Xj9!;)SKH8f_MM z)XAx-)s26H{r$^V4^ZM~CBcG>#t`4u zWnCH;MqQoeAAAY8G9XQ2sO{5i;_LokB6$(Va0xn_#YLQIzbLFftl_+%AZ-kI_RzT8 z?x36MiSHj1#mIdU!etS);Ft@Jj8>eHD*P^{;Eg-J0-1otI5R!HJhZ_4V(08R7=%4T zRgWWD%F2!(IdXUB zSK@c5l?{;Xx3agyN;)a&{b)vNcf_+^AY z8BpJA2sF+Ln0Sht56@EYA-~YI(!3IzkI%|(-!9`f*sgJnAzGa8-`^21TTAwTE#UI? 
zOeR5PIS!8J8++%Zt0gz)zPL@;>W%0dQdz$C4+;rqL-@;tf%`#7b!hy#z8W48couTs z1!gU;?SEI1D%#`pG8n^F3JZm@(s1SYtBBljE(4Z>A`7)6sRI5;W)DnQ&gq+S-#So3?g&oTO0~a91+)2r6y2h7 z7Yw^NY-$4o(-CWy6+mokk|X_xjXw7D{Ko7*Tx;f{1D&0YohLxV5Ruxec4lGFsRReV z)wzRtEcZKZCI!ER-GMR)4RDYmE*+d5$YF>&V?uKhM={n8QITF&jmVR=_QX*UF~Wb0 zB#I;moTC#Gh*`Yk!`@zhew>G*?&{UVd-vYK(CAZXae%lAm(ai?7RvVHGYQaH21oy( zmJBF2x%o90c~TM*w*l4S-{oxdjg4_QK;wG)Mt8c?hQi#~m=j`ycKIii^?%rHBx=Z* z8TvJ0D3bLb|EAJr$g8ziUDe&4Jw48dmc{3ayAH5)0!_#p#6Lql39@!$R^&Eh0bqVb zd(jSFymRL-UMh|zNa^9V+t`xy+E_tMBXo6AV9)#dR)FjaUqw&KG7F6rz_L4>h|CE; z|706LHmE*GonBqkDloJDA2w{Z&s??#hE^f40$@Llz0tVBnJ2O73c^ zKyQW-q0Rgwx3Y4bNb1bYU|-@vqHe)$fz>egC38NQ2hwuxud3B)miC0|tUa(zCj`}5FUhKh*_^$s;~d~*ve zWLsZRMuL07X9{=hZqi2ZC}5+AfD$V!f`&SDF2ZFCF0?e3-&!2$Wdg{V?0gLD9qa}E z2n14KjYTCKTGqx!MZ-*1G;6_Qx07N2z?)=SMoSg*?Aah1132@hXL2F7K<+8lEr9^k z!qamDfYFbcnLqZ~v~S#)iHf3!HcN)1!@9w&0Xv43h1D|j1gd*0kXafCb(L zeDs-_0blQ#LYR}Gmm}~iLJJRsK)56rmsA@0S}h|h)0tOh@XV=OGk2{=(jvuj=t7#f zF^)DOD(VO1xR5>A$L8I;r*{7QKBcSZC|!U*oJRa0i%W1eWNp>ep6E=GG>5SL5|sbO zEBTNs!5)yFj=)NCf;?^DiXVHCp+K8+8KyU!X&2O_IBDp_LAw_JZHF)XSorS7M*pCT zBz8VlDh^ZD;w<4UO-9uI{UKvU_E2M>M!*;7rz~&8cP<;DdIZ9NLm5{O@P8DTpP;9wxuN{(FYkC zv9-GR#mWAZoEz`#Z*PAzEPb*mCo7Bm;Zx&I{8WC%`xBKcJxw+cPhb-f2-qOdjr#fd zVRQp_CQ!>eo}Oa8vHAIS;C|iQPR`EV0%00+MBk^^@a)+eD2(xa%inFVNwy|y!vvva zz<9)6A;ovok0LqUg5>SRDHNRo9YkbS(jTo6E=U~3u*OZZKl=Xd+tTtfPNG@7aQOFK zzQ3cxU=}R19d{RgzrZ4cmx!N{@;FIuVIk+MbFkOMBqp*K9J8__3f$4G3#{xn8DCsO zrl08Fj4k>r+b6bm<3v2FXwO$cVHV`mn;H34Ad9afa>2Ut@|FvGDw;!TsY~`M} zCBXxr?REJLF0_2+l(5byja@Z>f@ZIk0j(@#phh}6^3^-h&i+LvJo18x;5%;7Qw1ga zDTAay=t0nm{hg_uREMGx`Gm;Agb&zz!}W4JM!q4;C@wy}XN3btpio|Ya-`fsT!d_Bm>K%zo$)`vh`!#G4aLrz~9zYy8{r*I~m@cn$-6#<%R z5R4`iWGs|8knxpid>WP?JW2R>f%qSY^PLicNEbP`T^#$NN}HO(CyO~XU80qX*Sk{B zY^-e1Z9mse{JHbhE}fT8a>{~iKR^HWpBg*=wc`s*dwWR5?9I%^=jLvjn))}5;no6R zWI!Z#X0Z*ifRv_&3PJy&(1HO$O!qKhY-38x_px`kX$ZyXoG-b=G@o9?nX?s;!Ww?o}f*Ed38)2TO2d z3y`JemNy}N%KemvxlY?|P06v{E4dT)x>jf5hh1B@uIgC#$fthZ_eRlq5+lx&y!sytz+`rWk0fC)h zYMMJS0^Bj?`uCqNhin*JQ4OQ69J@|8m^0x3?IX0OJ6ty}!#DwGei;ot?nOH>;n- zOYAUmyF@Ejj}-WjZwQhrjML%{!_m(n=Y#0O1dt@i#KO6WgM0V~GfFVIf#Wvct5Ilt zJ>~p;U8<*caf<0y&OK_MDYGTg<=TBl^V_^!vl5n>Jy?qa>`!ZIHm8J=u>0&~+R_!0 zD=Dtw0&JmO(5C|=2g-iq zw*7xt4yxMHKPj|%qmGhDu*I)Xc|f(QNXv`Z;^Mp9^~n*#-;bcAQkE2v+XoVzb8>11 zfLV|J4?7p|#i1cBZ!4cY{nF-GhMr2; z4~nz15B>TjAW&8w(!01ZU;i7HdMWQBnOk4CXSg zWG8c#-|&~D;C9*2RTQ`V-t}yNa;BJ{7iJa|cmsWeod5-zH>R$^Y=?s zs*&1%BF~imzZM|W8*#G|pNHo#<&i}&c@iLypMF#ey7Jdkh@QR`$cSN2N8rs_tn_8BW6KWws^8nd#mU$`Dm$VlS9yR4Z ze&bj4J;%OXyTW$i6`^KFnKtsuDGh#31O#3+G?dI;9WhtL>Cf1dv;T!r+uNh*t`9g) z-*U+eI%Mxw9C`5jT;gs;Qb$uJ$Elqy$J5=r_A1UfNfGbNxAS=r0-!Ird+bjg6wcM! z^6^i^sxsMN8;gyJb+{tg%YMAz{OhuC&P(+4;*X)nruzMu_25T9f4R0lv!%yhD_
    yY|k%zG}BuC%?5L_pNWT>t`o>b8B+#lPp!Nm;mcx&qsF|laZ zbco_mb)j~{9wqnzn}$(0{Kbe9wljfnYAogEnyfDKP}jhxuU|jhPv)}zrKbLulrgiL z9*KjUFI(=v_xZaVnx;VQFVBS*0p+k2?67G+pD;rP!Q8a^2_O*ij4Ckx5r+}VDTwAF z#zox@B>dMeDgyKX@Bo-7W!o8Cyhu)skOwAdZ)XQZnYEQFN>Ip<3CS6@*%I&LiDO+fdXpg!Lt_Fj{+TmxILi*y z#!|!yqg=q$a#9CDLR*-hI@y@#k><(86F0)W@$9I=9)_ATS`;%(+sS(VQ~b5@Dm{F+ zk&#m3XA*@@wFdJpT@jI&-~mcbFw#UtMv@SEOKBu7=G{;#GW_6vWrUnM9WY6HI_5YN zJr@A+1(92$T;Rdx1NHfKjn}>d#rxRn_Uot_=&d6E8*#j+*grEn;qWls8~ZJuQrVn@ zpw`z`T0#To2BjXTWkvvM`b=2sCM2SlrrhL%c4?8afJ615V5UvGDL}3V86Kt=*@+giSa`yL(cL z_r<**)1?u&ofOjI@7{fl2a7&l@LG)_+I};$wA9pFNbG=JLM!C$0kAgGUTZ~3Nh2VT z;0bxkBgl9E=Jvl4XT8JS1AII@jg@fdnS#lXI2vnXYs<>PF*iEu*EEI>9bYK2;9~{3 z+7wI?(u%$#?^&yM$q|iJN7=})N|{jdIG1&S2}CD;R_=E|pB;d;rAu z_PQ`y0aV-dJ_ksf%PMnqhv168cv|L*8(3gP$Cn-Uc^YA`lu*4}0Fa~l0 z;|1!fO9*Vjy^L!WB?T&z0lIGshL~Yf{&?~^KV@9b-Ojf(&D+O6IsNl`b$c|sZ)ix? z>@5{C0(R|T;y-c($_I;SZ$17PbTbIx0rl_;wpoCWeqWdr^8XNHd9MAU;+R$RptxJm z{j?*c6&&RE0a=m~MN9 z#>5z$IkRi_CFX?}r2d-zUobp!M=eB3tB=&qQ~CSVKmQ}s!G~Mv5;7_a=oEhen+Q}e z^C;>j5Cj^HbqwDb$-y$Zu(I-RZ7~h_bY9+1lyHbe@ZA!nVA%ncBfhj$Nfc;*!V8^~ zmxn-%u9c^Vq9YC;jE-ht2_s&4+UjS+m=J4m>|MtQ{lC(5x>KV2L)Jgpx6sh9ZTWPJ zyA)?-O%d}j4yoJ=Lu;g!5*1(NT*N=jk?u69v48S!Xn6QGUO{>J3cw`T3XpLlS@aQ- z1(mkL#8Ifo0*Q?`PR>tX$O;L0uqWDK53ZkPQmj^Fony)i9xRR`2cS<7`~u%VYa6B# z5)}0EVDlIlWsZ=b^|dvkj5tZge8lbpkL-u!<8-8f7CNq~x^2&ZPqT0QEaawsOf5#L zkP3%eS{f=PP&EjPsKdwv07EmcBs&0lA+=&;B&lED-o(TS6l2QrV;t;o?xvL>i|gR= ziw_<>f@<-k#Gy0F8g}d5hp^~@ra*^(xx|uM#ghKa>(oqM*_*9VSO9Q7I!PGgwyWObr1hhlfk>gUG1iP6}WJY>QfL2w!|I-6&5* z^*cxh__Z*6LB}wiFZ6b=vgSqQHp&#z-Pw0|^alKEjwCBj`YrJlcNsKAXKBU9#ZgaO z)&Rz$#w&X9T3L4XyEhiCPlS@3m<4ZAIqWUVd+_zh!Q;+C-#MIfl(eS8XA9nzdi=ITnmzX!4Nf@6fQ*pgZ^5S}HzsF8D~7x~mKK-# z2M3ZamD?dhLP=v!Ffm&9_=n>^Y*{iF`?bF7DeR=$5i$S$eazog*K*hS@nP*msn{No zTgdzWOmS8=q-WT>&icmf``YvkxclDmwoP+=#z|8lF5!Qsjf4de68wy!-iKy-0vo+>QWz-I36bIElX4&__h94h=5e|2!TKw|Ib#{Zii#f6|8TQ zW{npsI0g|AhqO==VP+I9AnFNzxXVtw(wj3j+oZ57vVQ@w6Di^LQ85Q&PkH^C1;42rnms*Ir5*1~p*o>vS{3x90%mO0DY z4dDe8(_nrKKOH_NaBZ5=afy-Ma~HGV4$bq^(ySi4f{@OKN; zGH0M%Nc=F^9Gv3L#F1MN75)RZR9p5L?+MfWy88RqV4G8MmJp5m8lQY!n0Zj(PaQTR zLDLh$3fx`Vot;0KUMr8=XGll%9IwZonk z=Q5Oeb*oR|dg>{>3tI;UZ0zANON5~h&1`K=_3p1jq=Zoh`Z~|UhUjnf^ooV$<^i7J z1H*4YPvV$%7K?VQ+3|%6UoL&Z44O)so-_&bp8W4|)N*a~Y>c6MIQHL4V zKQ9?m_<+(>(sA^<&7I(f50`($9s{}6UrMpxpKFE42A@1xn4gb_pNL_CBjdP>py)I$ zt$?@TJ}+qDIKe{ zvbOFQy2v_)5)IeDsGy;l5;FKc|NG~sQog2?3=r~1oU%4z3g(?D9h?JshgYlkdOm-48-1q0B;`2TP+}E1 zFP+4~V7dB2csiY4&aL(auW!B6${@vo_iSQ+|B>`Sw=pmP(U<0SvoZw>W#R0w^#SVl zbEJObDg;6Y4U*B9mzKJrXLhlMH^px_G7-4pz0Gh;jfT!YBqRjYLvHR5m?e{G18TgiA_JUI%Ju%36sEt_0p~JYKLT0g~=(jO4y?$4caZW0)0YK#kPxtZ1oq~@Iy{EkB?tP+SRU#Oba-MBwB{&{-tr{KBn$`xif5Hn*59V zt{^m?Z>=vv2Eh@3!mYG|givJo6|!uc>@C3&+!QQq0#VI($|NMdky>83yn9smt0Z_L zi8dvF$q77DU<+Fp?3h_up}T~z)A?v_s*|pSMBX};l<@QGs$ls zIa_tuWas$RC+f^?Ue7w3A|;002XF~WiO}|8EWqve+B-ez?;5iVxW^U3bnY%0Z;EC= zS+^+jRxdX##nqS#eAm4@Jju{Fe2$jZAecYZPf~u@ z>!9mzF0bZR-@i;gcYegie&`Ba#!ZrnZ}P$S?{RV6qoRvEE*7Eiwstq$Ni8d^9 zi^R*S2;qy3c}mN~^cdbYQ&WKOJ@*eh#SQF4ANSOkq$`$A(Rxt#*2*~Wz4gC;%e~(c z{3aVb%F3d+<}pf$4gc{I0r?~5#%2v zU$FJ`W@l%3Z*yr!=x3uldX35ysNc_WAK_xfVOfruIlv2W{;aMV7n_Fy{sJ(E%4CCD zvx-j^TxoXZ>xlkmw*qxoUpydUcJSiFT{67eanh;`nwotnPYow_FxQ`{_p`$Fi4Z1uoS`{PdWh+Esv)qQGD0keGB|2qSF0m#_1G?(^S}`>F#|AlV2Im&SjRa zSJpl20eIoK3@@Ziyu9h9rM+OEP$UL5;a2Ug)*gA!QeME2X_=^^%N zS_X#k)$zcnRK+e$p~5k4?u*~;2X@cILFfFU-u21932ruF)-J|(+S)w}Rj3!=O9>I` z4(jBU)#7ue58jG>EZ1!qtwQf6GyOYUqU+En=3sD_(?8FhILR7e^?xE^DAhLaj!Ooc z2!C-@Lf!FDTTMe!l;mLKZX>me=A_Rl?Z)(m04E&QdiKGjL`{5GLd#Ii?Jwig{IEt^ zvd)9AkGgcsh&5!nnkVpH>BbRH6*LOx#5IxNvb^D>c;GC1!6xg>)3o%TWBVT$AbcN+ 
zxTw5bX;3gP6ip<8MiWQZt~{cYm)BoU~B3`bMuA!Z0Akfw^)cR*M;H! zg?KSFGxG%Ol~7e&nmQze8yex!c*3QaH}H6>=h1@)c{wi{w_JHXYEd*rgoPAsKa_TB z+U;*$UYi*#6{^2JZ*L^;Xw&=Ykl{Ktw}GMIJzB-fncFwJMUMy_+ebl4|7QIHEomg+ z2N>yKLYx*n8cV4^Lc+yBTquw_P_tOXUOk-Dep%XhVteVA{mfKI(CLkL%N|c3>$MI} znGdU82~x;t_@Ts(SuyK=Bs-z06GQ3-97UVhT)0Fa6)Og18vpU}ZPyXZ4^R)J)w2;^ zl0O^JWGNwij8gF(!u+3Z&*Iai}YTYOUWFXjeFKACai3j+F@N zCXtRHvb2>+IE$`A=yTiU4?4EhN-1VS=X0si&tV@BT6N)5OoWJCc(jL`#2Y#NJseEu zABlZh6z?uNZ{X8jnsn{Ib6GubbNr&fD&Kr}spUAlbpOYO-(xl*SHj8FZvh(bRK+y6FU=F6Wyo*emGcpRUf1oO5nSQZiF=6dEH~Rt$e@N5f z%Hqc(IV#b6@fh_NT#Xhojp)7S}1bsZ!R5s8tTw&S@R1 ziP~7yaNVt_c30!ucwhtT0Y?xora4Z)4OU8s+OC@=>}@;zoo*fd%j3|?e)}Pv|4(^u_r{97Xxsl z5>MOp3zL(RpLM%$(hoA^^~wdSz{{N1#QZ`GIU**=y_yZBtV9IfBB^=_=^ z-P1arf~?G_vcJB;TXF*Vm{Hk1vLzqbGLG&DD8$T<9{{M@S|ugm=AXC~e{;%Fl6{Hi zS?Mf1SbcpbrkCSV_v*?fdgdFubqx$uPJ}=XiZ9=fTHr2V;vgz}(Vm=0*wAEyx{6cI z2bhL^h+A7$>^MaScqqO(2Rw<6irTTGPsZnO%xfJMu{+;E>B1(Gk$9Le z!aZHsx8mjPP1h(`yP;hnwoA12qr81)#_-W6Id%mct}iD@PP{$P}^y})<8g`e>~)TrZ0EfZ4{!&M%?9|T?gV$*SC@wC^PuuU&9 z$^jrCwPu0b_NM-z=jQV7$sr^l_E%3aJ${^Yte2?6+so`OCs=z3Atj;g4}+`cMGDI}vSv6TJ^571i>>0<)wev?RwLjYGnNxD}MYnBNkmA`J=y zP${%E%MjPW_>q~O4p*hPog;_@TQFo# zA5nhb=fo$Bn?8T$j0LFM?k_Wl!zr+{6AQ51qnfYsEK%2#Ka%4mf{HuJh*nxWd#?8+?{9St085n}3=*}G}MT$$C(h~wEU2@W5J6oJ`<~}zz@<<&m z2FC^FIJL$OFa5YcNlIbXIa8)>&-kY1QxaHIdLL>1<{1($Ml-=5gEi$m_2Iyu1x@+A zX=91v&wQpnEd+&hKX_yni@0>7dJlaR@noSs8;1Fp_vJ4tA7*@rA%8M1muv$&rof!| z0Nx{B8~~fv_QE=bP?-GrFE27O&ZADKd`tAvAjiVSCRZ{Nln)uQjd77-R2Wwv`k!T{ z#&5SOF_1}}y*^NpJG_Mf!6y`t(!I=C$hHm4ebp3Ba3syOYtNW>SAKF{IvZ073JG62 z{C4Z(;}bLMqo0ktK8|!o?7DX~Oi=J}q#D-$t+kY$qgTkwo-u;$BBtQ{h9Yx&IX3QS zQ;*I^mBf@nwF!zvQ<{0F-M@96sLyddmAX9$3Jf2;dE$J(` ziH@OF#F>WT_neveu9 zj~?L%uVAk?8yWUtC;@~%V9itgNEN&UmpVENws2HMSJ9`T;H#U&y&bU+|3~?&c z*OnV+tmpo=7nN3%l-}iB_WCjI`)X@WPgRUu-l|d?mq28|ja%yK3vnN(2Q33>!(J3w zjSOSE-0t1SPKe;n{*>M}FmOub$P55hD`2w*%F678+5ZH+uMQ<;b{H02SAZPB;;9L_ zu>IdsNK9g{J1p}jWr{G+-mYx4vdOiXMHz&2$N%s=?7%u6vN@!X=0NAP#BQ ze7tWIT@ae7p&C4)`!+3Ubl22DOc^4X( zYcFji@`6KYMMY2SmT~fe-GlxNFu99LN_?7e&*&u?xRo*^D-z<*&>(LTe6X^|uON2r(3`q4t*H2yYq_Ff5XXkw`HG0?lpP zYpY4V|2)7;k}81TMrb#2z%LiYiFj}9iLMP0xJba9hl}Q03l$uj5YjarH(o#OaDeCu zcGsg6q^0P$E06JK_{JUwTcjy-@E^9&$HOXn1f!)^P5JX`yf@1Cy$(_wLkSKK1RVKS z;ufa@D!7>*9HOQ8;jpysB^oDCJ4CX;9<|u-k22vOM)Ly0$8BwILKy&%9Us6QtjgKA zEAjeJ1{W72{|94Lpb&^$E~Q*!V+ot=9tl$B`Sgu*&#oJsi*_8>va$+=nr0kf&j7WA z6anQ7Jpel>RbEfrLbgsVG8PLxhOqNF85nvnjEI#r9!*FnSs)}m9}}f3YHHZH8EI*C zbah26-ZMh;iZA91U;n{ye5QL&<3oL5{!ru=^X8MEY$rBw(o>`$D;f<^IJ1Z;Y}Oz> zwoUmV(z3Oc#<=0>qP-1nT8}@=d&mrHPUpmUb5Dk_amB~r_6Ocq;fwy|S z5a3FJ5D>+F`nfq9b!Q$k@jxBwYfI&l@2wtc5B?Mv@!V5Z022sEyT6xC0cyt+J6*8j z-#!e+frvBlzW^;H@Ny3l5%z1>aNazkg_rHdtB6wqVV(bqi$^G_?P%ANXgqBGnoHDz zg92E1`jM6Z&He+c&ENDe?X?z^2>#P5eu>afq3}4%6(DcF-Ab}T0Ied94S+}=+e_>4 z{}3^I3$nbz(|`0!k;DAyL-&JE+4~WDr<{mospy1~5z984IUlwbR_f{|;e5@bhehM; z92|^|vqO2n58l`M9D2X;4wS~m^{QNSws~{kH+h>ktQsa;-28KSKtG?U7qE63HoXvCt)pCmnjVpL-&7}m zuLE612q7c@_9ZTKkOW4zo_&@{;SY$|wqu{eWhjtn6-gD|9lZ-d>OS*(Dcij&Hs%Kd zivV%%-5cH^tK(JR?g}i6+U=vcC@M)}zWU$exblN-W7s=zbafcNk{^_1<6IWYHH+l@ zb)lylQoPM7YSy5glRWnNfwVljoeQyRYr$7RnDdINgt z(Rix9E$rxFmI?R6!ibn5;51rAl-VdRp$&V5GkX7&cB)=Zs%qJBj_}X-b9GrL{Anpj zZ$&-K=7=(}_|@4mcH{e=9b<+8wtvqzdIjEta-qCcH>z|JVBV2*G zi~dG3eYw@={~vYV9nSUpw*Rgu8TC$NWD^=BE6FBFGD<{7BwI$5tSB?9sAOd&DpHLVJg;~o5~Tu0^JKNwH*Na6 zaAe-i;_~wb+VYeMbMCp3KZ!@Nu`k`@A`PDBX)K`B-RFih9X4}7iki?Tgh7Dk?}-3% zIp)WpmQ_+yR<2v+EGa3$9zc)edAaRN3sD>u!qLWup>K^wSEf-qR11>`udXaQv+#>x z#yZG(hfXYoRMPCNY>8P3&7b@kcmAPKg%90krIK2t@MvPDGkBroWD-`h>+402#=f`Y zl=>@7>cqs-)`=ikN?Bd1G$^(FZ5(Tz|l@08>;XisyDc21yS^KN@utUE8_}Y(Z 
zFh#+3KTZA7vJe!zYW|x5)NokDSA}u~gAt#Ec4UtHGJAVf68?9YXho1gaiWZrl)}VmsGEw8fx!7ELhH_W&*1 z%gYOX7n7wkOJ39gJn3dEr@c$neqZ69;M-UAs_XQs$K46Lo_XaibX@_C+f32;PM_KG zw1cc9!=WX9mxAPQQ-lAc<1we$i-p}%5slI`EzT|9U5~`-Q29orTA;Dt zm<&VWs!7&^*EDp(rrK~w#Mj${nH|LclfE*w|M$bcLJx$yi5=yTOG=80`US&yBpF9y zE8wh0Y+F>X-{xB2K>4Pjp|IN?=&M2Qec{u3dca+^4Gh2t+25;C%?QOE)Jb?oh*UzA zcM)u{u9#^9AD@Y-y0I>YvK=({R~N*qzz`(tt^biG?jP8762)!m;L_3(lndJbnsLiU z@rdUjQ_oa=m(iU>71gCoXrtbmt_2b^6rVK9Y~QLG$y*U{Mt*C4MvMQFF6^?}yvtHmpG zlJv%nUMI}WVLqmZv~3F=E4J`13^$z&qhf0aqGh>BCL*^3D-29qV2TQ{UrC7^20hV6 z!{#mwkE^t_w5rM*zl$lCmmYpN?Dx(BC0Q4$C8SJXLA)*>&zlW_C780qvDR5Hcjt8*p>O;j~sx(4-KVcWH4dwlaCWgfhJk+?{i4zAgOO1K)V=L z-(P3lpP%}Uzy$dF!S&o-Y;5*sX7B?#P;P+Az0LyIA?yfM$P^JH&^!?izqHXy{8K%} zFa|-THZ1k?$B!!1^p^41i4Gt*Adb=BKZ<)njtZ|c3PqzbmwG7tF@n;z4K_PSrAv4q zu5;OZ>uy4V2KhA<0}$ZF`S8I45w4O)*g(9Mia0qR%MRcH8-MFf&^ytx4!eEZHpLll z63N>>pBX$>A)^;}h`iF_(j_-&b5!X%^G&BmNB1oMMMoB#;o&WCmvLNQTgARtBx8oM zvVwv_+GgOR1Y0vx0oGv_uxNX|eGPvG#V8~uV-PptNq|6&`#>`j2MwN0{q}7n>kaez ze?_rs!Lw`~Kz;Mor3`x|aEXC|0e*OkD9EGDw(%f3&Mz!ji7?~bQG&NJjgyWb1Nzne z0TweA^eLOh>zn|Oe==&0jk)mT@s=)iy9A++-d0w%s;*>ww( z-fy7Bf#y=>6+xoMCt6trk)bDg?Yx32g>E;QVh1zx3p^Dp4#4uXb+2#8qqu`ZFZeKw z+>asV`tgGu^7OJd$YUTK!;WW?UxLx1juQfkynM}*S9=-MR~c1L47@{*9?gG>-gp^f zg>2ipK--ML21vE*##t5`9?p1!vI91&1j!|!pAtWenyLNMC;A`IB!lRswDNTWDQQ!2 z9I@371MEBTx4I}7Fa@|^xdCfDKlqVFOG}pnk$^2|VM&S4+_!M-SkTLnGr;Ul`tlbk z_(`M5{az&2u@+$qBAx@wz3uHq-Rp%|C;Cs%VOe+G9wh3@n!OCS@7~pETVxIdsPF?5 z4O?1UG5bhuQnW$j!|T_Nb!LW#&H1CtK$*pJ1cejsYw%+TWvP7jjLdBT6+}7ga>sUM z$3lPsUQg(H0KzB?QF?!w5d)K682x-5vNHzMUnEZBv0b2yI z6T+_@ysGeVZWM_{VM$G~5vvS-M5ra07#ZK;32cmM%#~+U&uBXqL$g~FTtQg60bD&(IX!`QM73ATuRP5@kgW96c;D>@2aYs1I^HPVUU-K)ySK^ zKVbijhi%&!vIY9pedGXJ*U!6)Ax&)l_)!vjO;mK)NeVsN;ReU~xL@v-s{5eZ_RKQ{f-s6!$m%(5Ou;~wwvJ*c2A-9G`@C(M8$JcUA{3-BtEHVc4T&(*)3 zPx)7R2rpNF5ApJ4a_eX=)}Mi)Au$OCTtOL*X@GjeHLvMzbu~D)t0G^W4?;Q)yAJb< z7wfn8&kP&34W`Be$Drb-4jBINV|8We%As2@cAj1G(kT|8UMp*j!~B@m_I5VFix2Pa zP)e;{qTBA-Hi#SG1WFBiR6WcGWn-I~o^G}*tPdNQd3IxE5fhP0TeY>J+efmG5t<0F z*jsb%GFauabB?ioeu}I8puqv@h^rna`Re%^o;rX+0)7oZS)bt8wHLGLqujgqJKngn zbHVsDsM$L5jIo2W?g%06rgYqo@n6<%I1UH_b{QYHZXEys4D2PG6_xkPFJCG%~GcFtxO2%wx%pSvFWb4lgpqIUQ^JMLdElPJJ zpbuXG;e`drM5G1~2B<*_g)ovGD_*E|N`!&B{?)6LAu~tTgeJ?b1t*lTsMS;J1S10T zZ4fQnx)`|Iv4;@DfIk7p#gKsx)8Z8mx&@A%J9(ww_36FZu$cGG-M+4ri<{d|HSN?x zk)eef;_O=o)>Mjv?Hl|6`kg+0d<+IcI8`xIGZiOGd+g4njFBeL~t`Y+U6- zk%NtG-E$9wWPqdzC{Zx64;~4pOviUU&d317HH~ZrtcM!>6B2lpUKZLn;3I6kd7%&^ zLEpf!3`pffXYTH>`pwC?FO6Q9>gnh}nvPViY~9JlC0jm#R~c4l7@H=V5U&q>w4>uU zA~#Mj6r_(tFWts#5cz;Ma%$=@dfq?V zl5p&k7yMRr9iN?fiT*&7OQtP9vkK~kXq? zZL(7cxH(>A$VZD2Da{n^Kw1`_IB_Wmem;J@j621fLF1sHs0dv^{>X--h)Jt^)Yuvz zGSbLsBaos?mynj;Pe_=8c)DhNRcj?B84W2^7tkZ(YM_q22BS@fd?bu;bFeO%0Qimy zfyW=#C2ch~Zp?}?HF@M>qQ<35^`kCGXfeZZhmw(S7ZN8ZU)0mrX6m+s!3FBeL#R;^ z#mv-gNj(&*E2W1aPR7?A&K6TT;vWh=in%au_n4H^tiP>V#$aqaov zzq5308y!S$a`gzrU#&AQzzc^mNbvh6XSV#Ded}2{zr=Ubo#6aC78-eY;7Uiao66xM zY!Mv%Gn1(~=?Xi=l$30Kucq26>BK^F+zUGo{^+c*{d>)5-LhF(c)O@=?fjqGzJ7ej z8rQU9kIhwZ&P$F{nc0s@H1;W!EoG}dpw+$
    J5JP194e)#7!@iu-z#S5+Pn^Bj^ zQgPWp3PmTohk4TKmlKO#(>5{=GZo&x;;we$#P_yc!ZD4b^is7V{vt5#u@-lhXBT?u zTp4JPrX(0D$HW}iL1k;8q{nE!Z{L2O+1=V~s6N>X_&*D}>BSr{JjKp&@AZd~PuTlR zyAMlG;hS`0VWqm$k9l4l-_8B{>(=HcF^p8!cwk91{UvdmG0G1ZaLB!@0_PFpP%~-Q zmFDS23XQw7R+%k&tu@Q9S7IAI;OyWS>pOW}QCx4iFD(vHL zqW<+tY`_)%Q?gEj3T>X<_9c^CUBjv7NB7v#2QprfcX7^>Jk}Q1BjFc~`BT?n!j3r- z&R2o1|E>GXiITECtB0CG*>`pyMY;OfgamPWPsqE zVn^o54=L5MQ#YM)HZZ#WI(U`b6kHME4klHT`ot7<09f?J&rFmj%g>Rb21i3pPX80Id(Qc6|+oKsRDw6 znC{;@Q1U58P|502d49;Z?!gOkNdxo!IQL(_Rylls?cG#S!-p4t4h-=r_v|;bFlOjm*QjGIgiAt%84@b4-twWk^`xw&n@v7}Mzfn@cqP zW7P|7Y!~w_^i9$4{J^LoU=yB3^L@mJ89``4q5_kU!>_Nn8*ZnRE|VD3{PtOF-@Y7l z68KuvAcjwp2~YfV_Utc$bY^L2b5CCno!Jb=NQYfX&0sjLqs+&M2V#IM3JblvH8xmD zu_!^GnLPqDh>`U7pSEpak9D;p>@}dJMGRyO=P&M2drjg_tOX zvgmoYeXA-*BojNkH+nIsa=xekuTXnO;`ybyVD(y=u=@j+m$>~gF*z?w?ONx3se3VZ zRT-mMz#6vpg#86R-BaZ0h+Q<*&9Cp7r{6pj@g17X67RcbJ69>$db_{)dG;@qz@}g7 zK_OT`DI7YRVZC&4Hb#65-C?xNT-q~rkVxO1FZhku zjT-jQg_pi)Wr)RUoZuunM>^B3;cbl(M(I+Rwi#rfJ&scZ?<|Ovj#q zEtg?ALFT%soZK49`Q75yE%2O;ijJ<2OuBp5`TMUu0(-Xb#woUW-d77V%f4JTeFeRw z>7PI4Y}+$Y0je)Ekd*-DGg8m)FgCoNYJF#`ajfRudHojX?Y7>W5!bogr)eC1kKyg( z-;s33LbqFC*b={gu~Ip99OX z`|R^Nc*jP+m#NeF(nWPDfs?@IG2GCPb6KQyET38E zWTj4Vj72W=J-=`}f-i1h9i^!&2WSFwYYL`E8y117wFv6UZUUd&y=au*U*VoPSr9`-o zib_bFUD@vqWQtwhM`kN8um4Z%95fK2bUKmb1`7%(Pjqy)stUeF<&s`i?L1PhvSS;& z%wvsK@r*yp<86oZ`EBCqS50aNTcbX-l7cz+2?;qM;q$)}#%&0N1^8HvK}8ODWRPT2 z1=J!(6gu~e3ode4GQc#@GX1r%aO2OEL0wKcQh16R zdaLNs{W+s2-{BCjmppk)NA~2yRLyDi+RRj{_qsbvl2zJ!4mDArIVW)OUezysTO<8--oJSF%IYR8!2-$I@^*X9;wB+G0y2n^uS!%nxrGdi z)LgqTqZxymMKEP~`3-u^5YB?e^mL$hWEP`h7-7aTJ|0Xqgi}74N53%_$J@CE;0ubc ztJkhERe2c+>(8@jXlac(eXjMLJpX@OfC+{({&z0t^nY5WHDr&>*X(G}7Lzx&W;TvZ zeK0S2BE&XRaa#E0k@^oAI6aaMT?~|Zg@(#KYuZaLC#&e46O!-TYq##CsEw`EN0tAh zEhADzD+TR^( z(_p~gvQ+13o1oYODJJe4_Ae04m4Y=plcPIO?@c7_h-utIm1=gQST7?tBcl|u zMEtKycUD0=ia;cTcR0342;hlmGXl_#Z?+8|)@M8^nv%`;@_Bent zo28{rPA6Kkarx$Zi$y!Q_ub#OrkK|Dok}@L360jZ`r(~!%KUe<=MVBlV}!{ESat#%613=E`E8i&Zid@cVSIK1KEsJoA8m8>O)c&vu@3)bpYr61$G{lu}y zgQE5jeF&Kv9cOwjf56eNz23nciz%q!B9$`Laomz7*?oV1Dg*VgMYn)*(s73OOmgw# zHPdIoJ1XC_Hm$jJBX_D#jkynbjb*1VuRqns4DCz?>Hy7*ohoU{3gR+RUvGGoPw%fj z2GYghR;~56bwwpPkEGyza$J4FYH!!C9VI#B8rv1a>Jjc5vLYfaXs`iD#at2Vc92%L z05}A`4)qYhRnORso&U<3=ZYfi-^Ahzh#=Ra4Q9PPd4=dTIRd7^P=ftWRq1+L&Ng< zREK~E>3N-{W0Pum^F~ruHu2{EnZ>8z3_{p)f*~dUV$#E)W-9-SbjzlCRC8n=+`%4o zSQwtDS+7pJw@OmgwwTaT`lCC=vX+{*EQ4JS7(%+1je3~o#|q5mMVpQTeGgg@JW|F zY>#*nKQcTRp%Rez&fZwxEwM7etsE%hwWxirfqu})+xr%A>McdNP4_3(Tj|rp+J0?L z3fRN#=X<8f^B7lx3V&0=ku#r4GXa^igf=&9waqDC;`P7X{kACR9n-@38=sFIU45Iq zlRmR!Y>wJ4U?bZ!B7>kMwmT&0l3+v#3GtaRZ1`wgWGxFbB9MB4gT+=}%N7^&C!g1@ zWz?lZr(G^t(A_)tj|Arfz`h{DxZyeFMzqKQ1nCeL&(YF6o}jgAcZf1w*to)N(16*` zzSu4oBY&kW-rhmA$>kq+weGbuW=7JYTMwcXvKfq=$DGh~be&-}2R6V&Z}6F%;+tTc zZ&f~V$;h!#Pjl#@(VO%$d(mi-_58hjbck}h03UlD<3hn^VkPCP_6E`Ie)3v--?x}? 
z?c(UNyL^<-W*^7}0Lh=cT zFj|pntY+v?E&RN0N#5R+wQ9q>iCdT3D=AdIaMDb`W?B_}!q>0;(MU&*3x8yQ$>C2z^i15E1}Pp zR4k7SC`U!!x6as@pq7KF*4bMeggBecI1DN*m3igl2(~fOP7Kpaje7^> z9t3R2KSs!u%sd^l1^@xq*61UZ=^F@+fkrGeM9=`Jz<4Hbt_(&{%HMWw>cH5tWe<;z zX`0p#3KpMeF1;bg=v|dO^uWy|xz4uB&;jKK2p8b>5|g*zzqfUD#K+>Sg0d3aCO~cn zCu|ApWGt-%Vq$F{KN9n4aZQko-BrB|J}O8dr7p3%mrK3{?a)_K`Py~0v{;!coJH&# z*fcuz9UOu5AMyjq<@ZgqGScAi^2NFW-j?sr?gtn-o;~p^FnTrYN@(ET77=M0tJ5O4 zb8OdDG@m;QTy)M(077d*R}pt^*QI=^;P9^Y!Lzp?Mr}3_xV0V?L9jJ4vQ*;B{QI7&cuUtv@2f+@lybw@G8Uegx%!j0SXB+n zH4dfc8dr4U$b~)$i!$n4Cu(l2dNx^?_J@}(vM%(G_p2**tj58{u>!5u8%;9JSOB`z zqn47TZpA%yM#4b{+0Y77iDZ@Yk_ynM{#*UTt=Le-M~G7(g7fph?Y)~3_pidi+?uf3 zYCyGzx?^M(RCf{Nk*c!idfp$bS&8~bVQ@n!ta=R;hkig3tVbOJ;9wSZ_`CHl#db$g zB`TKg)QPCKGl`hWImGaqRK%FoMc4)X?n^>Jy@nSICw;|Rn~z9K!>#DM(h$hBMX*iM z1#F^?iIUoamGIIXPkkH`asuQCR1|1VU`+$@PLIi6cz6gMV6T~!&c1WQ+>RLox}96M z>co6Th;}_B*1KA6o#|J;Xw~^C=kv8+`}Ma_hJJ!Nq~4WA z6Qro51dR(g0U%O#wzeu#EiEoe9X*Pv^XMbfvPJXs{;z zhcB~MwtlyxKA~8~P|rqfbBfKItFumOr`rxvnAGL-+&zIt@!$6a{>Tp1-S0a6sQ*#@ z{*U*)srzqf{mDs#83h9}ma-Q!_rcM|YxE`?iY=^;jI^IZ*G}s^sH|A`qbRxS=YxjA z9*=uCL_rXqiGF+Rsv+%d3&)EaQ+RFezex;3wBTvobp9Ju4wI9;fg9r8ptWgk>fzOo zVL}xdNmkn2x7KITa!Qa45x_}Tn~kJHZZ9RCKnjoR>bV^f@WLoBFwW3w^PiS3jnu}N z`)*1ZjgDFSSku=vSg#>TgK^~3=aWUro?>G15`}i$Kbc|;{H+Zf2Ym8M0rFsC$8Rv! zFa}oIIQ+Iw&XUHXJ6*li3$cS@zm1n)`;9DO=_4ea)71sNqZ5!2(Q7WG)hzsK(YNs~ z#2c6|AUSOU!->)9X%HDYO3X%IK_x~=I7~`O4bFcb$;yAT9AN%re7y3k?9XrdA49z> zK=DLY8B$W|@p~WUxFB!Q;79Lf6)_itWz!`2(0W7A=dxa&q2Ot^v1JH5{Vs=oaATKZ zo?^x=jZ>#;e3)R^2C&6oBD2ya*@B%f?~)6@X$~msDs>wwliJ3g$=_J}f;`pWVD8x} zH@=?l9c!z4Pe{cA1bG7d#mR`w9W<=Rl&q;uoPpPRs zqd$`^P+zg-_SuWv{uEM&=FH@x1!#}>RQiB&w&sp@xBSOMG4qx;553H8?&r{4Y^XY! zW6}5GdO^VD-*86+j*Xt3Gty}n7tzpqKn%a<=h>uPGasjK4N>{^=g;MOBkRGxuspbl zB4{9gpjm6+5$lB%#lN)+s$K>^o->sRfBcBH_tSyT$ZrAnIh7f~O(C#u6l^r0y9KZb zgs2=f7Fxa-yMbQ5*Xoga52A%9=l$mAnT!NF8xwP_+wsPG&$&l-H>4~;v2(uQ48~0C zJ;Oh}^mn?t{I;@mi02de!t9@28R?p!J^naRd=)drqtVxkk3X5w1}Cxe#!j#Scbmy ziUTd<+a}Yu?)r_!e`F44rNa2m8)xRac_2;}WD8l=vu>A0^)&6a`922u_oHd$IF3S$ zI;iqK^`kg$nraVUaLs_vm$;P_$R@bW%zj)gK}wN_*S(860KE!U_CPSBX7=s5{wDWp zvd-JKmBvv+50)D%9s4LR{cd@dle@MKM}OX|z4<;78nQ!{QZL$KJRha&cb@Xm&U~D= zgMQZ^p{peF5<(yPteD2Ea`+JKZZXRbAji~my7K%&l8P&~x^xM#>r?`v#|XiIjLXY` z7!Y)F(x;e1%1Oc4cy;D(nww6;{j<9I_C4*7<}7bbH4cuAu zd6>~vI5vyWFZ!Xx0Uk6aHO_6H-v4QMcH1E`)_%>ZB7$lw!^&;hc`r&+#koIyP8UZy zBk3$em=9m>dtOwM=)PXZd9YFHW^=>@BfX$hn(gZ*na8Qu1y=e?kFrmq(E%eQ9FI^a zf^Ci-E-@qu{HRDyd6Q4ts8p`HxX`mx120U`X6ED^C^CQy0!JRaw;sBEMaJvX$|&-`X_Em1i4{@bj-(lN^A zFH1Bi2e_oSZud>N;_B)Z9g8-=jh`?*>5VKD>FT7~e>u`)Hv#nJrzbTdzJcp!kZ@cIjGw*Gl3$uQRV$L_hzSDx1R$ zD$}wvEIoK^AnpQenk)qsVSajGpttvY>gY6C38!wNw720XWt|9UzfUM*F-i{G06-u_ zvl#V^8FZZo14n1w(P9P#5eF?J-L;m^&dW(|rxY5v7^rvWi3@#;-(p+*4P%nfY5RpN zw({ZfgpnoVR%JtK20H1ou>F~X)H{?IMBh2g3}Qfqm$PdWN+w-b4HJ`ooc3wi6)LX--c$#()3-Cm+}t z*IfS5Gr)Ol<&S@s!XB1PZ5xZHzVGsLck=j8dG-Wwbhig8^P?3REA|Q)fKlmH2xBtY zLpZ-PJogg~SFfeEyMEvis{0E4{2ae{KEq`_IWXFY>CNGi5v&sG1_npKKu$G0#YGmc zBx|H-vo`UElgqD+MT}c~Ml%Geq@%X&f?&fus2i4ggb2xVcVa9K#Qamdn`VxL_ltly zlfeB*JI7lOI50*mfJX_t-fPGqAb{qN#`S(r&Ywng;rYitWO*1W!ME!?*GR!3>gnYb z6!Vzc?8ML~oF2>Z02M(#5ASI=Ek^ov z)yWB?dsfVI!SY33tNX8W=OwqER9r?{e6H%q4E*WixB}FC3h0R$->+3I0*By{kgN{fn~nx^v}Jm?5H7|OdS;n3lUiE{WcOU5@{5J6`Y zrUe+DLk2N{sbN-?N&MUaY;nC+i8G|HbpP1MUuyw;BRZgM2UZN|8sHZJM}%55fQE_h zhwvu^@eP|cyCXaf)T62ZHE8vqY2;h6oC2S|60{-b2yp#ixH2rM`Vq_R!_SQKT?fRQ3e#w4{@ z-4Z0$f0PPeqjNxZ&_qLSjWkDMeZRcCX=nEaE-{f3f_Pyx^ZPfpoMYiN3>PcCB1(F@ z{U6i&EhXfIpdx}wY9O%D*UQf?89)B@W>C;^@Q|Mtk`#Xa?~{R@Na48y238wUwteQ#{_R%SaaY7Z#E3Y{yN*FYG^FZcmkGDWZK{MVw|iK_l`?2UQ7Q 
[base85-encoded GIT binary patch payload for the preceding image omitted]
literal 0
HcmV?d00001

diff --git a/v0.20.3/img/learning_curve42.png b/v0.20.3/img/learning_curve42.png
new file mode 100644
index 0000000000000000000000000000000000000000..e3d704b28f8ce9663e4b9ed5bd573bbfb5fb880a
GIT binary patch
literal 38412
[base85-encoded PNG payload omitted]

      ?4~^MvrOh?bsyz39{r2?S%vhH8u;R~qlMU3ZYT5R*BLNLI@(Z*0IF z{CN-nk`Hv)S2Gi>^ngi4lH1>$;dKyOnD1^D@A|>TrZ@eVv>WSZ*Z1`E5kN+e?O_yl~o3ULqHkj1BGnfpz`a_vJHzn+@kUB z<{&*6wbK$y;Ejj>@r$ya-j|TsivLy^`DTl}(offu4OZ)g`Y*Y(IiS2+vLg2K8xG#1 z(4P`w_p1P=07|}J@*@7rF=C^#ez9*zbpI1wM5LNO%E}&`aXbi41CCl(zq1XNr2@ytGp(yr{+IG z)x&RC5qBzQtwKIelNUqI0L)d1yo4@JE)q;1m+Oo~H}1_1-xraQ#v<*f$B+s7x^jsG zXb9i${1&2p@WqR%-#6oDa8a>|zN_i`a=(Yt`VKzt4e!F*|H;K*YV_(O*_tnCH8fU( zQ+ge;P;MT$;?+<-y}2X%D%+z0Z{+((lc*&gy3( z%e|G-WlEREUKMwbS3GNQQg)=V9&< z&QWYpC5Suf6!rmCr~bbWE(p9D{emkgjr7@3s&Js&V=>v4ZIT08$B&FJ^Na$jFB&7s zTb~`TKDs*k`a=7Q4TAPmgw@vC?+v-ggurnFvR(OAj2&HHw;&y!4{skm#X?2@D~x0r z(P9(iL*6_;_Wm0iQXo^JlsR8QF1FM5r`lB9kv9KRxmlICNb;9s@aeSPFqTxCpvWg8 z+zumsPHY842CAbai3+HWq`A6;{$VV-0#eyGpB~|Iy?`-TY(;fe8oC#9v!GR?(2WE{ zSxHd!>viUSM!9C+3Xqu^O6UBW{3{T!nTIopE$(;tv zYaqZXHZiiGA>oR~Pc+(dS=hE$<77z4?a0Z>GUVeE>om^PcTJd<`I3-Wd_ak=Dv)hM z#CH1^;mOFma+C(}+r_uQ<=x(3LwP{G%k0EOtJgK9_vtVN&L4kTZ=TXVioY&oT&G&& zOT~r?aWS%~0udtaUTX{mJ|gad58?b$xf7KK2w!ZTAM=wwA9Ct@%R|WPmF?-e>$JCn zw5)R~5EY5{VhGOvG)W}cS7EKB3YN;Q^r^el=zH|{Fe+-2KqiKb;S96k)rV(~Q*9#O zss~|iQzE&|>d)@L-##;&rJ1VCKRFpdq^IGi_!@EZn~pes;?!iRr`+g0EK!l^sZm;| z)gu92U&}8{bCrzQ&is#v+yffJHv^9T5~KL zXx=&>d=inHJ^Z?0^kiR)A=p=Ft&`+<8T$gIV58xaT-q7K$18jAe_eJDzrN)Odc8)K zf{I$+LFbBFnHTL=6n!}nFeP3T9iU7r{l!Sk zh(KYV@SW>`YL9&f8(SIAvbScy>0{|zf&?V#YR~>^WND6z#G?o5b=cD2Busb7KUF(< zm(A7xF?gEbLQFdsm^eIa%G7Q=jRf$sHVbv%USi=3FNVg ziL$!iG4sE|i_XqSa?A@Bi?p+q`=9cL8@1?_z9;9DoIUP3QMAV}8V~)OitREuJ;r!o+=S_+o0Ws6wVlIyV6HwfD1vz1XaYMvdXLPBU*9@jQ)FO=fO( zFah?{g~*5r9rLw_R2}ovJI^)N(mCAI$0Kb~tGUVN^R|o%SGF?|WcKy@W(w&r>Ze9C z=1S?a*{q#AUNt$RY)VR|PTbMadsF5nGtX*MS9kC#>xHdPUCoC=bhon2HucSH&9{!# zls--A*VJ~Jk8RzKSH7(fg3)CJ&fZp^i4NkpyldPtClj#i+p_&3bLNa~y|<`R()&ix zPn2E1oMu0Q6kXNU-@#8>+GhMuOX16sYf2&P1+l;N*c|12IVXIkhkQBamMN(ULC1l} ztOvd2Mv;jt&$&4Rhd)#C)^Ew}ZYXlqRhAU)tKZgaYhq#LCw#RHAO5i!s#~wmh-=ru z#_y?mDikZU7IFRbgj3Wj=OR(xD}J1Fa+UL%lzFbo|8N+E`0#Y3Dh}m*fhxws&-GZCIFwJVBPpiqG7jE{Ey$?Mh2bA;ouqPvFl!em9lff2<`8;I@IN0GT6yVfHzCJ4ZVGj z%u8D8HuoJUwyu=Mxjm!kjI36bGSC*M7+bP(0NjpZyCban{*{)^s zbxC)dVk+X;$&;#b*hh4lW4q-zSQ+`1eaFWB>7mgO{EYh=*M}n{8MJ_vw?ga^3R#vR zTmE)NE*;PAbm#zm7;{KcrUY%?AeLn`fTTky!o$Y{u(VdvXvDzkf8h1R|0`*p0^Sh4Sl{Q}ezMQ^SNxr$e zi>6%yZDV4a&K8c=v=&=&in!@)l7t6@{YTVgv#kmlwDjEVQN_XyIU9JH1X6lt!s2o^ zSBcpsQ}4gp<9@MW;ftYe`!18oNR7W<`LCQIS=k^t-}*1&V5w^~M;`7Xmf7saES z;@=2e2rxK%wX7?=UP+VKTtP2C9*B8#v6<;yz3;au_d~gBTVH0QyfoS-!(h)5b{h%2 zsMTkEB`xAc@WM*-qrjM2&$rED9bio;%^zui3!cSFnWK~i;}|Lrr~27>KnI%Q5BGOv zVa6Tv+?@x_yb-g0zJy+Gw|G<~_D-ypGt9nsQ0MmF=tZp+^4;92%$!z%Nt~URxM*8u zweH`(5_>`zyOCVmeJv2*{lzK{vQQTnt`Q#3MmtSQEueIQo~Ry(0ZC^yS?# zt<70zuZweUlfGMpd_SFqtz|R|wBN!4PSh(?OI$pS5egg++JwnJ-#jQ%ZfrL-q&@C! 
zE8H4i`%W}*8mCoI#dU`%7stJ!*s&g(%4ojkBTI0$1f z@WSS*^IB(xS3~e@1+fM(2RcrC2g{RZaYIgmXzzYzWyP;I;WbeQKS?>KsG-Mr@eLSX zr$^)}+nm#SzXV~TbCbWTtM7@Q=J)$Ahk~3Y#-CZ^x-sA+c-|j0SR|TfLaa&XYtGW$(fs}RU0U}@ z)p!hxCGVlG@6lib>22i}D^%!~#5$c`y&6Z-UA2krBmsSy5b32(`_Y=p;0ZJ#tow4V zJvJiN8@=z(0GuEUdK9g_!q~e=;=25E&!Pgyqpq#Rt!#Mzgw7vPv6ea)<|5v6#6gY?dh~2s%Enr z@YX>E4(!O_pb2Q!FNkRO*#CiJEZytZ#xv!n5Vi+YG2c&8=X-|w z{Qd;Qa01-f(Lg|ToV4^S&TyjfI#9L(x@dAel|ZeI+TqtJ7TNk~Za4f2@6S|;ZY zlAXSp!Tet6jE{{G#>dBtu@G{AOMwdlSG`as^-3%&-By;+!o{r-t?p=qTQ&p)12 ze&|dOaw!ly^_eCZfuz4&H;8~2+Pww$?d487xDd)DV2(odw}HgGF%)&p*JR z?*iNoz?#bdvkfbhRJ|0rm>)k}0?tcmfD{ePky@Cf93*ELc%gzih|O3J3K0zq4Mk%m zfmC2-CXLhn9FgY@fLJ1uwlF*9f8=Z6C7GD5yG1I!an^n8zf3N#jky=WCZQ&NJgEWewpR1Q!ZNykhFD>T`>kaKR)GyI?fOpw4^wIj|Q;$>!S$o(17gs6@$XK_qJG+ zC_Tcq!S|+)de0^9K?45|EdRe@>i^F0=zG^lv?!hbzz>v9rT*iZ_TKk@UnIzH0mwE1 zzf(5IO9N^Tpa}`=F-zRe{sZ=k+Vj-OAUJ=i)I9_#IR^t!FXU48Va09AK5vAl|al5yb{tYdivl11|LxK zi6p0~DWD2gnSlEjLInNVUSFxoi15#&vE`LG8tb3`{j5-c{(s|goE~8S>l9SU00`S3 h{)_tludscGYA-hSv%O{GEdux@Au98}P(;V~{{Z3<4paaD literal 0 HcmV?d00001 diff --git a/v0.20.3/img/learningcurves.png b/v0.20.3/img/learningcurves.png new file mode 100644 index 0000000000000000000000000000000000000000..92b0fda949afe6f9b68de4e2e2d0c641dcf4711d GIT binary patch literal 38242 zcma&O1yGe=)ILf`ND4|wN=r9Li?o!Abc29&cS?6k$3a4*k09M3AkrY+-EioJyAQwl z?ssSIf9_mn7>76aUiqwNt=F)3N;23OBp3(?2-tG6k{=KdkTehw5M$9%!4U^uiWKnA zBgZ##AJM^&7y74A@b_aoSxrX-1YAS-FX9ie0&{Rsz)4EONyXOG$<@%o1i{+S$A$yFc7$;+8YzN_ZpNIi$V$?WGI!155Veoz8Vps9!}Hij%JHOc`5Fv8C%UdwsR; zj@FZpgwSsvB+;J~iE}(cGS4$J-9Lz-!c#zJwPOe`~Z}X2>MoDUbD`DEAZfIIDJbY*v<%y}iAqGMxdvV%qP@LD2~F@fP$S4-ZA$IKHf1 zPqPe-iP>0*5u@kgs;DaX$C8+s=yi9wG3#@^hah}^b9|~-{@>*_jvGozESl|>?>$k`CQOk@T}v4DU^golTRg<-A8jc3!Ah@{+k zNJ)4Uh-Vxn74iQ);ukc8(kG=SfnmZ+|J}RSUS7fo3=l6awq$05bHRE-Y0_4wdet_d zQ**<7iike{#V80NW}Hjzyd@BvM| zfR(8dA-I{eadcK)$G}Y1>vjU@g3GY|(fUAU@5$Dv`+BOG+i^dC02ky5mPV05SZL_4 zU%xbb(J|I9usOZ2oVsEtmCyI*avXHtb5nz>w@%Lds1vt zl5{egZVHzPMx|*##d;@3FM*X5fwcy9uNW@>4)bfFa<;HwBGPrjD^@S3 z=jBZl+ON&kJV&p?NZN8*>bb3}Dxd=EF8#FB9;A>dmty(_TI*wJqiXk6Uf+N*k08f{0EqXwF4mdaAV z0HQxo5kqGUE}=|+wLF=?QysoywE3yd#k{+Qg449G-mn=wSK{|OGX!Fg9SYQGy~_8` zf3PxG%l&Q4Qb)*WjjjIG`N3eO>-^Mi)ATnYDD^D?%Y$Vjex4{JFK@XM+!-%5zC2oE z(X0rZ(D!Z9G!E8lb=WO8NVLEO%a?uWez6piD(vpie~yBJ^1Hixw9e72-gyVruVd+j zh)8Csb`9T-9u*NZ5^9Hzsg5EKR`y#VjVJ7U-ZKU`7J{DJ5=vJTsf><}PCu`JfI#Ev zxOODDP_^bcMqPk(oA1Yf`~q~aU#-aU3bMhnJ#o^7tbcgiv+oahfByWbkizk$&T)ea z=r+rJ{rSAtaevxGM0ohCgT~_&PGeMwU@Y@_ue8&BzP}4>G6cY9mkc`oK7JlEjLz{nWZ7J!BfwOtUEj-lwk+<*-B^DoTqUIu<{b=V?< zKZ5X+^0w?UtMVh~n_yXa`CR#AHicBKAk9is#CJM6eVfC%&v`ALxz0JO8>~N#eLqb+ zy49s#23NPNJZA?vb`eKapj-mIeFru_hyAi70)_95ee?Cc(A&3f-OlG+5oa2EiIB_wbJG9={6YKO3mTI5Fe5b4jgw~b8|*jRaG}( zitvaCIYmY6=?ts_%;LsJy&IU*B5;FsLJ={T^FE)`ahWo4+M7O&olwXUPTk!6jQ{j0 z@YozEG?vt+_S?LDL_`E56I0P;(T^XWVHth0{2GL6Qknq^G~m@E@5H^s?Y2hpS+uJo zLnu6c%HVEMJ2 zxNTyKgv7T}ExzbTa0~rF~q2Jf*%^==M%w@GmAsxQIZ}XG&{+4J~Zewf6C$Uh8 zh%~KVhlZkqMO)~e@8}a}SG-D{d+j7iyyV=-CR1K7ii@9iq6trCk7N`S6fBjget~AKS`yOvcurtzeShFjqb-5cVt9F-P*QIV@Aguo*qW$={IpUyuK zSm5uEn0CI@LozKIAsM%p8@=jTD&xK_3YPh-l{F zO801!r)zwCJh8AFOuW2d&W5e|!bs2iR28My$PIrc=d4@x)3Mqm3Wk09!nB!B#goHh1K!Um* z8lmD+^ks_&fLC$I$;r(+Pw4j7%b=@6b=)w{ex30EN6QOh)v1jwR4KqGBt&iHozUq3 zzfXre1FcYc?dq%0;2}EjlbKmUQW9LLwOilQ{XNQK%M8pPBL3#)f6;-}p**(Kz&xP= zXz`IeIvNX+nmc3C;LNlN98={g{6Q^A3m#m{FBN_Y4hT#KTD5Y~+#<@$@PZp;vx96} z9n{cR_Buf3jcdhN@>@fC5FcL4(UHT|*=G4gM67|57=+-A1OXL;GWfN1sY! 
zu{FL+{1@(yU>&&J?nkR>2bdTb0A?|;v&XPEU-&Dg^VwgnC8L4F={fYnu+mWS)x+NM zP~4ckzwVWxPhSlDMz>lzorwBBqYvPlHuYQd^S<}qeAd&?VlTlC{BH~Dl9}MZ;*s{$ zK8Bh|y};*{f`wUR*P_aKOCw|BR1QNVfDuiuPPT-B-85dVr_Z{~Si>WsakPLmBJ-1* zTXX_=g7)uEB^JNx7xDi;nF)u!&n4aQdjASY3%QMr886-#Nue&`2RBeWsYk^Ejrv<9FE8 z02f>o7FGCq-u(UF)KaBKD&X*`vRM7e5n^w?2poG+QG;;v>lY8O|4v2zxo7`8gH9+| z{?kG1@c)7o{k9KqFctS2^U!attLSCtk%#sJJdq(t??-)YVq(HPNDGJah}*L*&e^eg z)FtK@I(q*71ap5Y;fCVjv^9bf($NMoLHC^Nf})$7o27E0rZ5FA@P?&mq5T(3OyPP> z9%G)~vS=j%B%;g@>HrZE>;Cq@_c^}}870W4mdXqU($Qo+_jl)F)YRKzlRSCIYKkcG z58w!Cf46dwo`E3*goj3%3zv-l8{(U%(bCh$XJka0Q}~3aenu<#^Adra55Oz@2V6|T z^P{T3Uc=qp{dZ?4idEy2h1G|=9{Na67LBC?@A1%PhjekHs#nsfK#~cHu@*cfbp^S! za#<5c|B+UQCRyqKz10_Kug)PCPfLXdKocr*Xtp^vgxg^P8CvP4&hecV++>}Blr$Pt z{>vWGvj5I6Q8(jhBqJQhkrDxo*wvF(p%wos`dW%IgYcldJR2%XD9KEW-CkDI1q)?Q zndAeCD@;IAqd7M)SuJA_6b@ez#CoWx;shBD1m>cO3`%=+m0JRNqU{+fE8UW#EuH|q zX}u8zu#rd&89AwY!(=WC8%tJ}rZZEruplz$ueW#>Jmml4rJ7?xBdL+Btui1r$#S@2 zIB7F?MTpgHj7UM}4JQPT(GT=Ofrmt<-k*(G9eU|;4HOOL46J2njQg~&mGMDYB5RGI zp~OD~^$E0em>4bA<)_}olMHIG4{7-G-zxPVU;3H+ zl+c^Ef6z*{F~*m0Nau9xLv-`&uwHQ()FnKCbn2>?24f$g+xpaflY6!?js_^j0YYJ! zbQo|;cHlq(T3v+62U?z$Y!JJ0*$+&nLFgJ1CRO7jCN{9Q$v|4jvxdK2@>LrLj(*$K z*c~(MpzY{Fc$cvkbNw0oeAN3^E=8=6;&~B`d{q;xp8KC_t=muXb4lP4wmKG9!|oz@ zaOd3Sx8)_y^de5|>f1-p;BK~RfT2Nl$8*kGw*$7iNtWO-G)QB$^jm1<58J@wvxjG6 zzi(ZJ5tKyEo}R=gcko!;Z@hg_kt;_51Kc(ukq8Z5&1vL2t3w+_*)1LUhs3B#2PRZz z!&Wzcpu(NV4~cM$prjsoJSIr&{WfE9Ra=u*xR9KDAbkH%P*{r24aS-zQ{vLY`k})G z-sOV&2a8Ikwwj^@S>Xms@4-5!!ByJ}t66akSy}dTc9S&QULu`qVntL1wEUlsK}e6Q zmO{kz?J(PULQn!W^V}})7jZ zglVA_Xu3;g5WME*eFjC~uBcyf+!7blPe_0KZpUvJc}js47_dvye|tGpX>HeU^Phh3 zkc4HWoF?wF`SXI>r>76_N1n3R9Cp@D{IU|Y%)`ldtwr_* zDDbU&{Mrd0-Tw@`b|!@T$48ac>A?k9UA=$e)Z(o=d{?Vqce~6k{W(Rz8_gx2LC=cy zsAsj4lwItVK%7V%?8vHUe9;dQgyb{^O37_eGCDjao$|Yt$%hL)e*K`<96#04djBeV zR}r8heCiqg?OR5+d&ArZ&&u(ug5YhTDib_!m>6cigWaO}vbIx*XXIu+xa>;BfyjQA>dLE`6KSkdmeOhU~zGAZF_n5 z#M6;^?*4>BQILm{*mS2qs36}EltjI!kE=$n?{u*lAr`dy2$qkq%W9vH$gF+@3b!Eu zbOiMW2L{&vGMBFcD&@N}MD%NNvWlMGJibb?;VWG$C(=AAiqUM;L1#I8hMxs2F>h@N zma3r$gAbwz&sK*5J}8CHnnYRbmM7TopL_qPvRJ=KvaU2l^e$f@q(j`w$_iTG!XZPO zK1=USgP$%e2?T~2p-b0*{E8DUZA*U&=zjCH4O_!5SG~)g#~;h_Qf(Pc&F5xW?(|z* zTeEH}QD2ji#)WJEwv>{R;?)P`Q-9jV*-_KiN$ionTKbr{tM4aVZ=-z!M;J;`dsW>^O9{aE0k*#{| zL+D>G>2|$WlK{$DH8ny_P0hUT-zyB-`~%tvTIrH?+H<5LERrtQ2O)z1?{1UN?72K8 zC)YhzRj;*(1db?o{K}FeC++7ss_iy8ZOlo z&Rn_z6&mB1mNrL9PfxG@5(8&DhM%hi<;a|@aP8+h#S6v8rOUnftgAJYP;bHs(w0P#5pp|L|~9a|P>6eZ7U?tcVlc52bXt29-R+L;;?t@>u(+y{!L zD7hk>8PFmBFo^~^6dm)@ugdViw8)DBmG-2~sUDU{V63T@&>ayL+HUR|Tgvblgs?Yi zAXkEH@@fwUxCywbZKtis;;Il9YFhPC*WYv1GE_FAaeC z&u7x-pVaXMIvWl(n@tBFQc4y;u3(>>9Gj#Ir06>Eq@4OzpTOP3f*Ovn!rur!K$4ot z0T5&nInLCfX$@%dW^e*4u=>AyEzk53i4E}#D)}4y6L*y!4ZX((5~O1Kgsdj+PBn9G z!u^7(bloi2a90eo*qwl-U`|{}21`iZ>0uWyF{b>gT$Jfs#R*to2OUR=`<7=HjQgJ+ zk`aE+8$R&uKk9~z`AcnA-_EQ4$aE0 zTx||kR|jlMLt~ez$NCKJ*dx_}pWs@c2x0Z7*sCA+=(np!u0$)oVqp=HgOiUA;29>X zrXvi^L@uLxJ5{gAMiFA4Jdf$;sKSNUZI6OU|a zD|=M&P77~Q0xFTojeB?ox-xybs#?x-Gv##k*%8s3L zzb`qyz#$#`@7&8(BsFOpdR-$yJc502misj}M7*Fb0%)iRaIQPJ+hP}%rk|d@ZjH8h zSu`snr)pl=4X+2PY4VWNQVTMyrG#+m?lF7{4N#Wl+S77LK6ZlBj~S^5pcyV`HI!nR zahb}3tGuTD6ea8GP{EgQbRU>EjV4aM*5xFYJ3kQxwS$JBe8N?;lElK%4t#*Ek2$GS z+bn*(=Jn#k3V2rY-!Wbc4QKR!=KG-5Pt1iApZFi1LAMPA=Pur+`{EE@Agh7Q_qyw0 zb>fJ{IKiU?u_7AK7Jt=>hN)6l5HIrfKzvW=mZ&G+4PC;<)!j;TGqxyjTGsWl}Q-j$-28eFN9N=JP_8wWo{A zeK-^g4xzy{}%#6EDxUq-tX5{phPRElAer0>s5 zKflbN|A6fq`BJ~rwPbxR)4r6n14=KLcv;h#R(N=L$}4+n!06gWC_#Bs24oSu^1l=s z(AD8{U?*hKO^3i`k70pQGCQjSxIzSt1{cnPBX;q zMvp+(KX_)^OyxRf>) z8-f@=7baLF4%|R;()Tui)3}Snab3aZVhIi29{|hNJJynxM%8U_u^xV0o%H&Q=F#S& 
z(t5YQH6p1pP)f8;71H?bROO*ZQy)YO^3%AR;@8GDs6kCmHiuaS+s5(#_yjJBVeS-@ zFQ8Zn1D_Uf7{kzjH!LoeBb@baz$;L_-^gIV>0XLj47G5fxb|@Bx5a-$u^dsYbl^7} zZDtQkaAdHRjJ!rIStK7@tn~Wm#)KqV226@avD6Pxq@5DDBELafOHM(7R#32hT>T}e zJunvWzX?4@S8Da}nmX1w9d>$Kj5I0)kE&?}Pe3RTQ*6++uLpWK0#F4ku0?rwitJj-!1?HA1s3?WLL*`Vq6Y~}`(&t}{I z3QIX5G}77pb)>=K70jHA1SRU@&2kEo&sIF5mhyS;*ZbILWGxrCv(Mr6Vpn06BrySj zRE^D?mNA>mGk|lM*uiq^^vP#?KA~|K?6J&u-jqhv)*b}uj2}>g1hl?n3u#|DM%|@= zhdehRbk>^Lb`@ojpzam(s7o?Vg#t_#%wlT0>$Lbexz$G;22B-btT{BZkvjq;4dyH3 zOQVxo(5M=IuP0DXAUD~SF)iv0QE$PisVShT0Pv*5$;rt8J$|f!Zc5+APWnkLg#uC- zzZYL-nO|h;veNPjE5uNfFxZgCvLv&!;li!=O__WPS%?UdpXi>agJHR&+hNGWtE!d9 zyvl6Vd0a7_q}nXa3znRowQ{wk?k#}1l-2$Sq;|Dk+?ORn+QFewYiC%hxc}`b^yV8= z>@`%dHsR&DTCBdFYzFrk%=SljvE>3i(`m3xk?~4(;a~qlf5K`MX zi*;a9zpVo4g4VEKQ={K2o}<8xpVC(MPp5bH$}b@t4^}Rgcrh}eRNG2*=x1#^+~6vf z2&pMiwwr$v6a2!$l>^;tB8q%>T>~l6sFmuTKUtW6I0D5A-~-dXyu1Xx8pclp4T4|O zFvjuRCO5%8+6f4IF7%9dyl(UUEk$m+At{%nbTo&z_P1!sC3ZUQG+}wobZk+&PrWlZ zTn(ca)3?|-V3^#*Gq&47NEma6Z9U{ud=C&1-segm?$s<*cj-jGMB8t}c&4sF%Zc~C zA48+ztwU;!GZoQ!UAEfk_Uq9Q#hiBSp2}i9i=!qVYx%g&e)%S{Mqp!V|E2?Y%|~dzE2c}!$vrm9u$KnPLV$ON z;(Ds+J50OLT^~8&627irhJ!^#pL?*)dHp65l>_I)xV_Y`J_0eUp3#*J(VyNKwb0KE znrV8~cPxihiLP7Md?rFn7*+Uw!n=j!dt7MQo2}hYM^K*IU`)oaHxvUU})7Axwe;il6 zWR&dICwN4Lv73RC1>*k5g4Lu_gDw%Nw|k?F9A}*E)`r;4>hJSAjt{J-Vi@S zyBF^4vh^gEoF*kqV%s*gyXH!1n(l2;Dn5M=a)f}z*p<5NaE??dXADjJl9M4yt$*UO zl99yxI(^fGev^Yk8=_~q!&}xaU%#A=C;u}GuuM0CWB6UlMqs!2YE2}r>r?P6o80?i z+vSMbP{|N+|EKumv5P#iPB-{}@H0_GG4*B_jrZv(O(<%i5p5YN3Q9_RB)5_|b37Lk zz6mqUr-u{9tb+BbApge=*?~k`Qt}BjqVfs~09}4Qo;HftvB1UvB`Cq!r*%nQT|RXG znO=?BVhfU;==2ezI`#!?oMD__OOD$dk)9QONo}i=|0t!wUy-})ss=2VWY@J7#TCtj zV?JcTj6P9)G>6{B5aCJPbz|4(UvGs&~ z954f;Ng{s+ZSUx)I~kEr74vNdjEa2VmvRGlR!RWtNSvQ+T3Rl2nY%g|9_SaTJMwFJ z!4f(>&895c3H&;2hK8}p=MYc}3U0)F&WF{B*9H%lQ*r+lNhW?&MdONCW_rch(>mO0 z(%KP*!C%`P)1$KEHOu>(mR?5{fJK3;F^_N#&+CM~IGRtW{TYZHxo%n_Il(fwVll}s3o z;Y21ZD^fhL{jeXjF|iW{fW>3%syh=OSR0!A;ukcHpGH!aeH)=hujUXJ0uoo>wzj@# zz*EL=uHLsMb3r3I6GelGO}U_#ll&t&wjuy~3z}+| z4YgWW#2C*SY4WPwdg_zFhcj`vJ*HymeZxMpd@nsoQnkwG{AVmBxJ`vw&uU~geI*Hz z&xh@cuPwZJAD63NT7VvcL;e@E4d8i*jGK#T5fPKb^<#(S&OFsDg&ye6X&7)G3sLf) z@CvM?TqUFWvX0~R|D#erUL5HbXxNdB-WhV(NIi|xUz0 z{AoPWDp3lY#3Z&b==27B?n4Q&n1UA@g6#ba|KW4R(|7 zFNZV{o|l|(I>z=K%JpAc%17WJ3#2UUxvq-VN>(IZhJomeO{(~(8TglC&kt8c1Xyh_43_iQKBJ6!NjQr z(V&S5hte2&7v9a?vzkFT7Sdug7^C@2z~V~XN)=-!9#Ym5{AYp2YFkeL29sEg-~3EAYc7PA^~hck&>HpPgr5JGclrxEZKm= z_?_Kd5kLDV;wVdYNxt=&rY3<

      4U{nFQ z#pQlQ8aI5DyTtuEocZTCzM+ph;(>KWci{ZS(Oi~nmd?hCZFlNPIQD+(dm6#4KVjWu zy<$nX3Gqd(6ms@2Eg+yxE>lT}sR8ghd$3__T%7$MO9w|s_)rX(X<_9p?cf41>7dPj zdvw;-P_rd4f`7azq#w#zbnSj)6P7$GM69qwbkjoJ!BM+4*#j6$*r00pqg$Ubo9COh zkmD{wc<(`={+Dd478u!D@H)oM6m%{+uLn#Vm&@*CQp$&b=i)_hjzndhu>hKb?&p8h zp0+&(Ns{@X@%WyvR~cAJMnV{ z)R5zpN%C-&<%EPD;H3I~9=h z+v;X4OfCM+VqGX6G@Y(xyLHGSy+sGc$%ttRTMZZZs+NedmqqgdE4Y)iPcOG0@Y4<_ zKgM>B2A2^rL}<%uU|9f>)<(LJM{BTAl&`^2uM!&NCcQqIE~KC%F;XbmXv%1+`p>q`jmZEBk1QIV=0U6*(>L* zCy9N^CQ~dkmm*D1yN`Y+y_y&9kk$%uL2OZ1xnaA*mp&EDoLUg+_wJ=!+byF>J>B)^J6sbiL{Vtb8bfNhf1f|?YELpyuRAJZUQ)b366;rB=QcLno3}j#glI*w^ z!Vl9N-$0!E>SJ)70r@8?1A+)rDM0!E%~H5q&Na7zGNd@?Sug3u`ftYTbWzXUT>Hsf z_aj>E)&o5BU_*}Y*UuZB_7kv_wg=%|!o7$CGj%ze)_r?t+k{QK|J+|90<{G6Ex}|5 zZYvnAAm+2uD77rB-)5Tk*eT7PU8rEG?q3KhC|NS>i99*LJWfPw4j*gMC2F@~Ywi<^ zAyTRuX3JUQujZHbG!xnvFhTVtKa{CnHM58BU?4aKc9ljcI%kNga&8=_1u4&F?Y&C zp?@dL@8f|cMtJjY*WPy1?88>j2KPfetS;TVDW*VhDZuv3i#ifX#P*5V!cywkIIp>U zb?D8WnF|10^WH5l$fUDgv!}xL;{m(HEbG%T_pSG(KAC?_$=;>mo7=f7gJ}IA=;`X} zb9PT|ft3zM%a5(oEt1697)cXm_7e{5E24)EU7V=;5Zi8Ymrv~2<7Ng%`s{*%PYL#|jY%Dty0O&6mFli3IW!^OxEQQ3QR`@5X+t}|TvHR+- zt+W{{&WnypJGsqZtB+AzEy2Uooxu8gz+druIS47UKmrOLdsbfk;(k?=(6&4F5OS^6 z!K|U#n-wOFU>f|-cN8V6o~*%58-|)?oj1dLHp(?)GlD-@x97_+l{0VozvEnzAK2W4 zb|;BU4$}ds8kIG6f_!+5p?`Ad_|qE+U8-B5cBkI=bP>$fP@D$ z3uO*`UDC~0_`SRAXJDik2FFNx0~gA<-LH7veXlddrgzzZT1SVk*>9;{t=yu;^V#G! zG{|{EnVJYkTh1IVjo67?`7e%yg)(!a%ZkRnMQ-_^PTo zYsZ$&KZ=ra2QP)2z6kTyjg9G7qKv;BtcLy73n$~h)EQ&mm%m8Ppm4cs+(g`G+me`hYj&AkrG z{+a2?eHMEnb}x4u+#%)GO%a%X_M*{Pxn1z=_q-@d#|hd9vu7$+6#qpp6T3?D$zkic zs3{gt9bZ3Iszo+vsw+}Ghh{upiV`hqt7K@Hw|px?n9u?{`fC~?cEW!-W0%0eee&k@ zVZ1A6rVVvS3{op0fxJS?)0DAXF}T@PUa1|eI{Q*Fx#A`Y@u^gKKQ$?C&UPD~@w+eV zhy9}nbl#P7v%vMB`+oExEy^>7(@k`Axu+Ah&5x>nUTtrWOB!cLPuz9K{uGNN@|Hlt z@!pb{mo)}3Oj>VbQCVcX9tEY7-hEtLV$#r#U;*?&$9HPCJs#4QJv=RW|FK3osjpt# zl;`G8Icdnynh449Iy(v$I{u+>*RC{lUv_KyQq>Q#H@$4?4Ekuj@X>4&DfE%e^gYUJ z)Mrcg7NO8+k)QfLX52nP0O15<_V}$u;uci=38-n0U~PAq+`%ru+9F z9AqsRy7P@>ED#h815Ja=R{u;oJ-vlp<2x62w)Dy*2k*_sQiYfj9TU z(WhIMc3b_}cit?$M#fZLamI&I(J}0fEa4(gh!E}Zy6)~penZGxZUpGb`f_*sxIxQV z9|)@(q>R63AD2bD+$VQ-1i!bLY2KWAQo5GIIIDBpmaS{e)%6;=K2Jf- zs|h(4Os9SIq8#aniRFmNz!Hz892TdJ=*gU26rJ0El|5IEH2LRQK(R+ERgH|YB<)Ag zk#L}}`zXyWQw7)I?$zT7Sf#c# zPX2;-c)1hh`vS#RyHyhLXkafk%k$-?Pbr^wp9Yj$9eJzFq29k5?jql!uT;MV>_qnh zx0k|>rNc2of@Hz{L8tSP`>yse_GP))+lxpuXy>njKI22(vQ8o)*5*GyFFb0b>QRJDyvtf+ai>H62kg?}?NU~v=-sc`?#oQB8V^ zl%rtPolkKNu6~|TFG;(g6J4Y6AYn_(+G&OD@XV`G<+gdzI`IR6re4Nf*1?S)0`?~d zk;X<^bG_l%iTLKQKZks#I9%>dQ85MMmlra@F^t}dHsqh<@y*}yT?s}K-TiS@9$c<& zK0ltL2w;NvK0e5!4G~K(l(s;vj zYc;soGxesdMvkGzKM63CMDd8d99S8T@xHF9Rpd@m=8UgvUn3KgFvHZjT3ZS>sBe_B zZ|js9b?du^%QMA8WTQj%{ImRZn5s(<(=+{Dv#1QOXjnzdD=3?PH}Hr2C?=^1yUF== zJf%{f%%I-*I-`=jz*V$v%2CdVMA#`KY6dk;IB5x2Q@Qkbn1*D0&2{njMY_B7jIIvh zISvuT3$K7S{lr!#LOo_$ig7l))eM85MPYocpGD1RRx6UDqdA_Z&I2K4Wp<|aqjA&s5`UK z+@SB!XjD6JPIi3uy;L}Bb`vvUCVDDx@{6#c#YA)WdLdo%gk&~vdup{!y`qK%(&BUL zx=i*nkKq*ePvf)@);^^vU=--V7UHS~m#=J0BDXldZgxI><$Bm6OU9i7J8MFAX!*{C z6|jPj9WBq}e&QD4YkL|}iYay+zZwYqMQ3IvPbTs~0r(#fdyOx(%O$73=_?R>pCumM zP4U4O4H!a(CO~gzYhptJ@|`ZJGj4-d$lUOwzc75bxWjyP1$noV5@=93`76ALJUt9@ zG(q3Az@uPF;Y;Jbb~#(OT7I)q(lXMdo9x%m&ey&;(00clb$lfJTZ&Y}xIE8j6nEGA zk}BTD8)FpjIrN%W=3{MJmHC~BX7Tw|z_{a%_st#}WkEi%!dvdT?1(QCAtm9VWQWlu z4hpeBsi>qrPrJ^Eg6$7E81PIey67cpKIhIWZhs$bk(~S*JlOWJiaVlcw^S16P`$*c zTQ{U8FV-8Noz{#%7fwe1e(L?%S&w9(DZ^j1J z*TXk!SQHx;%A*b@T#*Oc1;KPIe4Ok(u#5f*mbdCG)LggEusq%R2XhB5e82jfGol>a zCtMUC;B1qf-5#)7?Iqab2-3Y+sdDW)gl+k*2P@lb#K_dp>oa@6ttK5D!q%U=T!p~C)B!m^sX%1z+Y>AL*7!vcqFAsKT4bPlSBqmi 
zU#68v#N965=ROg8Uf%M)*^n-JUc%qOcW|EUubQ{gF3N=QB#tkMkA!o$(pp=b>tV>? z-S1m{vA<0WgRQ2hHaE!6gT)&nYz)-LJ&x{9`SHIGc0o}#?(vgM;?%PStJ}$MbTT^p z0(t6xJ6ViqEb#?|&0*2IV9sLP(}^eq|0dNrNex+$R&$iEIjZ4ZU6(*@D) z>_&YKn=X%gYK{s9DVIUd%IFGe8HR;9Ani1|M7@8Mx_Y-l7{1=}rcXef*RqkxmOzYX z1Ew;JAW-bWylz_O|G)Wm)BO`i+xU>BVSwNiNY65U+*!5hLtoa$B>j2|b^h z-@WmfqFI7H(s_Ji5urR|(SQzc2je7eKYY#g7>(xI*0rTNEyJ`><_iLn<28;(N}bf8Q7>f35h*Yj9XxlPhv_ zBxI9Q{T)dMfcOq zXPIvcQjBFoBR2vRG2Iwf7QD_@@YxQ0@t%$h(6kf!q%@%F=gk=m6uKxWo^h@*vxBfM zAM8EN_z9S{bSPShU3kS?Twf{!$@-sfV*V zPS4P+O5)kY5N`I&{LQV-xt}M+5A<5zbvP~zuJxbZuZ1JF7=pt;)4$wqN@v>R5s<3I z-~a8P9dIK_XTHjwa3wrg1`KR0m{^2_yMWdky5AS^c;+uk!-ZtNeeZljr|-uG@Erg2 z-vjnM`u&{J3TS4U4&as_Ehrw>0X|0Q!0Rubz26=|lR_}vMKV@M)5g+Ng0;@PihM9N zH8#NrVlN)0xQRFz1E8#iUELpYklP4L&1io%J!RJYiSFrO24F$ydHtqiS)S7v(KGK8 z{Jpz{17wGT?e7C+D>CF2c#bdVq`Mq1-g~IP4$nwGWbU(D`c+OpBeTa>D5pE=R{( zgd9z;HfBVf1Lq8hPDpwy7g~OM1O7mpRP@tyZ1EN(mN0gqSm3If`@*I%vryl?&giFa z4=n7d^{c8+FR2|gzzlN6<1rcDkIQcheG7MFHK|WjzrN4<`SnMokLTpukbZLclg-|j zUIzj?44D#c7BQv^EhjU|VA^Pf!rid4+amJck7xUWTZC*|LSKl~TK*#I-)E{%GvUXn$D@x|e^Qa(*b$SN?Z-W0?$0E&eU-C~z=M##6*mIo%@m#`1lB zD5JJsvR;_T&b6123Z(K0(5wE>TtBoPOc0lG1k}-3Mauq6IZI4FIAec)7OnaRAFohZ zcH)M7%*>EHXY2S&^jF3GHOnL;RkbGqpFRbYxk9|y&hnz4H6ANe;>O?P7akX4+ia74 zh6C<$n*G(S_m!VFLYD>x@Be~=MI*Jce#+RkZ>C-Bt-9UH69&g~HoAUiEKry~aqPOu zbo9(1|7z!)P1S?!?sGyj>vQAeGuf2HESl2gxX>bLZ?c2x3AoYvnjY7cy^kJ-lb#fy z&afO42n!|K0y9oQb$gT43V$7NTZZTC>Y>qo7;0PLQh}~}GmC!4le@q5>_if^C$6pu z`6Q-r^%whRr2cIY@mglQ-XvN`Z-Kar#bnu25)%4qxDokY?6QsTPYwOBXKGUQI$AOM z&XUrW7h`LBIiRXDf6p&m$sO0jT(Lzo(mhB$8)Ch5xAd^HVBEUZ+Z}h? zm2Nk$dBSuVUDV&tSz1~LE;F&%G*{QI*c-okI$bq{%Id!H2!mez2o4sO`z$!8p-P~U z^)~?L%t~(F8|AC5hWKuf3hAk+%mG#ZzKX`^zq<1$?J5oYSGR3kphqMjjlBvcCO9`j z)KlvXP#N+mx_Ob=OKS=Yl@4v@7aILy_$+1W+$yN0?^JKPO-7cNJ`_!CrNPbc~s!me990?Dsk#&tF-z2ID> zug(#X&-L9WI8n`ApV8hj;2AD6SR11vNDxf9~RF(UtqOYQ+iq)_hc$d zNa;dkFvdMlLk5eRXrbFWV&72SN3FQOhXbo@$M|UPl@OgO$Eq%_t<6~cx6nUd9(LfZ zajixs%0oU0n<5+&M3YD=(+C7mBq3|QvPPMH``6=^6Gp353MOBb4GPH<1>2J*?eP9z z_G%M&8RM6<>(-ioKsfCupj0riI;aLh^|x2UOQ!UQ&wetbgQ|T6Yn4> zy}p{2y|}NEc?>z^sZKd#y_g{@QP**$5L!WOo<1ZW^b$+96uUz;wI%dOtsXTxvwIqn#q|vDJxXut6Qxcd|83(}=ju?cEf$u+tjv$5rM7A+ z%Rji;yVk?MEw&8uZ+j=s@W20g=$SKc(T`^@J-RfhD70}-)(Pyj1EBW9%d>L#v+Lwd ziqRm30gqUsF43+_5t%X4G6P@c%!NBdE50MW%&0ZXIVMHte4BlNdjk;rG;c=L6|OE! zLF?$Bg=07lzuT=7sWRl#5vZMX7htdg6TRxa(RmiU$JSuZvM60Nd1I^vx7X>a_)+N? 
ztj=kTI?@g;`i_Ce9Se?aE+^hKUs?X;_gPpxzf2b_SR`}%kZ>?Z0zO{2X%Sty7`!05 zI0#Ixqr3G$r9ZR&DEq6k-&b(ift;Dvi&d%SeWXcZ>A2|h_!D-kGKvO_HPz*XxN zll!bmnBPmZL^kvbdmGIG=ee9)jSmQNEJ+iNlRLMHnJq82e_ntoZ?T>?}n9K}S zZyQkt_xE1ym`9Di8}r5xY4FLp=*7kLyls}UHI|mfbD!1hZ+6ut>?CARazLX@WHb!6K1?q?Gjg;XF+}DwdgIs&_P{Bqwj50c*$vLH7-WBiWAddv`-2cn_MoJ zr(6L*HK*P4f_q-;jZ>{JWc-|HTGOLBDvlg$Uwez@loz+*!yTuer} zuz$8RcK6pK=!O#;jJGkLUrVSv6je#16dN6rcm;Wq&XN>-v)b3l>DJjlP;pzH)bhsg zCRqFGb%_X2jj^K?+R4030PNp>puX2r&JA6O9#_b_o3)UyWg3zi-C4EDqA`D{XgJ%o z?p|w!i1@=}AM(@d!^w(`NvJFLc@|fIan1)VFE(dRP+TWqMJsbX6xY^Ahr1q>W&_6~ zG$i!roOMOXa*Zp0e6>_*zpf@}Hva1AKHl-RVmi8KnMch1fZgT7BOo+3bZ_em|1Jfk zI42uJKM|LEcSx#fd{aiF-T|-rj4{x4>nZH#90NqYXT%+N3f5!(0nyE^`Vm{Ai*&)5 z+82xGB55gQFLUn1JU7TGWSsZ!2m8*Nr*rgJU3({MXAA-mN3jF(KTy%uZP8xlKY7bW z^@TPz@LBw8q}kVhnjTXTzfpd}Imc(?N6N<*Us^Vos5Uc`u|QcFpKc{DJ=V7Y{)>WC z?gk;5kq1= zz4C!glY1@I**e~~msi#rwL#BUK0iyLZPqJTnKtJtufe#3DlUDm+IG+i`@KbnFE3BG zenE#-;j%JLto`y0lm=@!vh}4rv9rO~wy*k>wd^q;u1TpLvDcvQdn+&f6tVcVeMA!G zNz_aH$;}0_Sm%v#U&9^psE_^YtMWI?HyB}>qUQhZbUK3Qp?_j%pB1SFRBisLlI|dH z!%yW2tI_XnUd^F8glwl4aM%q-(egEqkkaj>{+seRGN<7yJl2qich>mc-_2z$ni@@S zR4%4?5@i-wlt4yVLEA zeEjca{IE_st`<3rJKKWx<0$``h4;1Q3VrhV6g8m#eWCOHPIQ&x5Kbhy5{!7bu_`&2`<4Mg1ZNI2<`-T=i+X` zA-LPc-QC^YLU6a>!5xCzzxm$#>uu$3rGUCSGuzX0x~I=0Rj^mHKSQ5x&!AQj`q(c@ zY)qL{7vdLt_d<&WA6ZZ`Ch;pTZJmrRoAge)G%KF?YZreCjxI`9j%f1y=IA3<`BwT7 zVV6=zpBOs1R+86eo8?;nl?L+$-&aqw3ErpDc10SXy9Up`IRdGs>i~B7BxC-m%R8FE z((RxRMN4Jg!|*fg?~7g1hZ3F95N+D0$&1jx{3~A(t6Ka%j^L&2@?X%c`dx8=LJ8he zeEACx2<@J{1U_+!z|D4WqWPRjqi1Z0v8Fm^KTDsM81Po2v`i4LweH>KB#fFqUZ`{r zWrAn;&Xf#5n0-&keZR2P9J*$x!p!?De^62ipfRQ45yjB9N*rtB(rWJpP~&3~)!=l_ zGyMERO{u9)byWe)TkN6m61pCLc+oVStyrL3(^8IB9xW`a@MEb^LKEj8CBa8DWwtsN zPTvB(bGqm-@n&{ zpDC&whbS7MMx*+DHy|D=Dxk2-KY!#>kqq>xkH%hEoMxPlBcUW_G?7Mv8 z9mu`kB)P}@Ie^5rVE<~(ca2t!-6wy73Fd)rQS#YhpiQP}kmimqhjYd+dcm(J4rnWL zb44Vo(GXuK=LF;lY#h#aFs7@_c=KKHvTA|Hj6ZpgUd7Zdjna>1j@ySc$naZGXR5M$ zm0GPpB~e9v1={?3dZmeD-5|yNWq6=f=->f$g`jf;Y9~g}!Hw}ru`9x+et{y$zu_0f zBwd1*dhr{|-`g8P{HpfKoSAZeWxLJI0~du$vy8*4bx#LLWJU2E5SN^ztemUtzdLPQ zsVi&o33C&idbTYJdz>uoyieJFSWS5M9fcmSSdYxv_=-8Pq07g9bC351D%XKMxz5ia zFWxeA(re!YzcGF8Rb=t+w_kVdFG7}3nJ25W#L9brhUV=+oZ{FV(d4vSp3G8ldNCan za=%;;O;ceFykLI9w!7)W2)n1u)%I*IIBOhUl>#Mn)wCQ=JXGo5xpJw1Zf-^`Q=B(i z*+RrD2*XtsYK!%-bO9x<-h`;!M&7egT=MP8b*p=0!-!}qp9$#TI6LgTaTMVKdR`tDH=9~3moUNV z#5v_1nK#2(Je499u;OsWc!!f;-SFGl0?yfk=B8({D9vjLzgO}V5M3xlZkTL0KILjF z(l}~G51KBpa!k%##ZAy2i{4&&_6eO}9T#kI_6H}Gm-3eA_M35kH_+J)$XMKWHw-n@ z{`lgV7sECtQ6ryE2E{i}%b6>|vh*E}6J{R26q4m={xD1pWpTsS(l3L^rHvh$b57V6 zI}4wYx5(lY+Z+}gkdh9?%w7>FaeM>06C_0$hao0E|+K*~l9Xbz%lY<7ijb_L@Y z1SG%68KNtm^JR?>q-f*};xbLFxvd{%9Y{z-O=0Yt~6_8$?^Fo`RTl^~s&JqQ`+WiGP zdb${e5tc-ksdSJ|8ka0zh)*8LP}kZdeHbqPY~KjaR0e(+f*@=nXP^p3WgkAoQnu!U zI)Mmq=*~CE6lHum*P?2tIE7K8xHZ2Vu;^s|XDhx-8k&4{&<+pUWNjBy^E;B+6W0)& zUBGxOk1aI_coKm)^{Ct97xTV7y|;NT#LeLwWwD?&kT={rTE#QHS@%kHeHE(=_7U1r z?%I~GQO~oMOmz2(*zq9xyEwxp{MVv8 zepvlIV-l-pL{ZiMo^4a|f`D*1mpJk1QlDt`FN(gEJ5tWV0CkDpBy`i5gsuhQMH8xY z#VtAg9SwiSH`KBIMu^r*iT!%3w4-c)EL`G9RfS7^oK6DtP7KvK+KN@^nW%K$N2w;d z@_j7i?1MPefSwC%Z#VgvJE)hQBUY}7L`U4vgS_AIsG;Fy%|V`(^X&$56IW$KmZ8N0 zu)i$2nJxats@nNM@q(4!zj-xx%pKE=4>Yr(j!M>$KQ5(H1fOZ(KV#jVp=if@5k-E( zQX7q#M}tnbQ5u?5%!nRdsu^Up-AF~#_&0a2#9lt5fQ*h&O02lNqax(;loUno5rO0W z=>nv+0Ov1>vD6Q}wvFo^jM?9OVP)43Sz!c-%Jq_ddmp#7c|Ik*nBI{8ZaNoyv%e|u zc3L&f6!OKRTEFaA_e1?G5l(MSSzVbV4T65RQyp5g=cIqJ2<&$^{|1u1JvqK`We!Sz z15}b{id#T=>#zMDh$ZeVKx-kfftaenX}rBN#;@XF3PE*xN2R2}sSTxv6+Q&Njl9b_ z58mqZFRlCW@r{N)f)n@Z`idNX>}!ZH#;>HkeZjx!%VnNvmKUeL98tVrU+Q>j$6WBj 
z2a7C!3QQ56hEgXgP>HpPi{S+;{))Xr7v%559k2lv+zyuwrH&Qh(Xb{_>=WJ7!J$bY~C$daDp{zZtMQ9=pT;ek`07 z=u}whlyvh7crt2pLcc^a-um=TE)M)!_kS9*P7yhaAok{`v=(abfUR6z<|8M{ zm)kD{f&ph*GDAfm410u++M8Q9R82&2CzM(va}g)7RnsJ8u`^<`+4Nh z3W+K`!*$F~9!@FJF1nI-(>0u4d?N?-B0<>LNS%W8St@>$M5MRs9qGqtrI5&gXU)}SFqfPCWh{SsiYiX}cqgzKvr%pSo08JKgfH}U?MSmD zzW95$?B$dYv~<7MZFH*DiA1|>pn{1T8;Y%O(7}m8pRLnXxMhmd%!6}?t_9LkQ=H0t zdKs1%RE9_ZtsO0MX>N6{ANh*}WQ3wcq-$-5Gc*17v%R8Wn7_uv$a)wNQrj$Ew6zK( z@` z!4iTEavYmJ&pj@fzujBOcIgI4xm+jHS}uo-tey?XUE-k|t8>L;Eu)7Fye?FaVGt!6 zs$GOsjix=Nbb31jTyq>-syUS@Wt!??e#senv0OJBfnKz!Hs-&Jin^NY@k34U{m{iX z9|`+H?MimBQ75#2K%+O7$nQ1p4fP8TsL!i^!qlpM0yGAsAzxybw zKm$j5z19ZVyzpHMJTu1Y0#?vFXi@65_(J3$7pstbZ!gQrs}-v-Ycxwh>z1gvTVA-T zCY?7C%5cjpUYwwIQK&NVGcj8PZguzLoy%E~U}uWJb{AmdW>7bltk;ZYGSYw|O*WN@ z)#)-&5P@^?M@btTg>?iFdM4;YO+zB@JCwrpH^X{22n+0R1?miFqA*_pK`FC-7G+HxivgrnQ^|H`p`}ORq+iWUisoF;3k2dH5U0! z--6-0?~mr_N6hq#kn(r!C4sKmy^^|(U%Ep%ffg9WATG`nFN=aaulb1Lb}XR1O7wg^ zMYXP|bcenIGzuSF38qXAGx>slZOcwM>M~8wm&e^t4GcHjUR5BEVk8fUJ|~GKRb@Ek;`oT#$3ee&w}TESlK^UBMz zL9#hm(PPeqgD#l-IT+w5cc-xRC;y0nE@$oHFgIk_LJ~t0*6^xKd@ua#aTtWGJQ}hu z6n&cyxePo1*3Kh{M4mHwK+E*(rqw2n1s@bOIb(FW95_j#;v>uw?EL#wduBXYXmK}V zJAZ!%UG98lVQD5{uTUH_Ngd;5&erAS_JSAhbE3vi^Hf1r#{z4!QRU+ zSfi;T0Nrawn08#(zMM@`K?&JniZByK&8e$qBK-4_Zn_1p1l`hon0Xe33D;^Jf$cI9 z=Vu8%b&CP}Tg9{T!ew=L-5aGloWRLmv2TnR0!_M^_OhUaQ@FF8 zt}TTP#O|r`;wJv~S6V7htH;?<4Ym&V;ka8#QIpnx`wUkPhT?Um3V$ARdrPtwoj^UG zxw2PL(D&=bNwvlSvcqR#!|3mo;%3L)x}qZ?U%awyj0On#O8HCTz(azd^<}A+Pt=|U zw85J%SXr@e*^@mR($x)w2Ru2SfoWUtq+eP^x=sVCfWC9pnD8ffK4UnM60czL2SeI? zdJW{NYkf7>>&o25oLM4LxjSq5PML;On(7s3CPUJvd^Vqpt-t}F4WZ<=xofUFSO`Fa z?7Q6AZe$epBdHB09sKv?}9#zCBJbC3Foy4u3v4_eAg${WKXlQoprXXs$> z-Ydve)4(9>Or&pF7&=FXJqp9T>Ak!kq?2piqbXwPYEMCYd@#42Kq$Mj&gGbSJM+=t zGCqVmnQX4qNsgEXOrgZR7HwW07;><>e4|QlYrW8zXZ<@|&6*uoN&+MMn>S)tP3pNh zu?Xv$L+1BUDL2{O41MdL!ho{gALiwdfubl7K_4*ls)MzuW*e|0RzrRg zYak0|%hRxTp&MTGtg*Bqxfk5F&k>T5t(cflQKFK`%XLdFubVu{fF%gt` zx$F0pQzz7?-U2DQPXbJ#P@y|WyLaR6d2qwudSqW|&$y>1 zqWCmg!IzV`dKZ#t5!#;lY`=a=xK`Q89=FGufPF2fJBSE+hKvSx*wBBwfGS{Cj&c8D z8hO9UM5!grJj=lT?bfA}tmKg%CA-MIGcT-5(Mi3!)m~k6QGU0f_jl`0WWH7T-cFn) z{hYiINFm*t?1vmLH~n~76EA00$(9o3(!$Oz_{CFeiV|aL_cVR(;klU_`1oCqS06mi zH;z62EJPeGyzxU3fkU^Y1)O(yyK|~`u8*v(!z-m)`cWm5;VWz|LV$gC{MNXeZDIaf zQYNgWP}RSsCgJFh*VHC2M%zw8sj*ba!m)q&Y79OpPI@v|4rwWjy!43v* zYKf|u(+0jFHuc8Y`9+273ZE)r=$A78Ix?j@FrnCarh0AqhK)1}4IG$xgH1U~qSMeq zMbKQ9M&S3v)mY;sIDcWvHLG}*rv%U4vd^0SSGIh5c*tayM(gSKG-w5pX*b z8%3CkN>B8!TrZz`caL0$)zt;HuC5U-l)c~*l%OXU;SMYZm zz$NrVDCU+>b1bD8a~Hg_4Jd=^6IzeBkZmz4CrnLA2kgVJHSN~gP4_rfLW=X|+WXiy z_1ARC4X*&^EN>2HSp}k_b3zHuG;%ztghQ#BCIJ}>Ypyg4hw>)F zIx140@b^%BO)1GcX~+Eg7S3AKqzCfN&$2X__^Kbv{%!AjZH~W;waDq09;i_2-mkK$ zRFfhH4k!;@z(T~xoP?64o7~~2d8<*b%Nn10@J+!?Rt`cxZjZ5+92L~KR&~yr;hozM_`g<%z0Xv z+Oh!z`C(vK=q4-0&glEqr*4;P^VCB;K?815s5Gf$F%o&!R4QOuJ{7AfVX0O*d)7Wg zV@@*m>Xbx)>zsNP@k~2i6*^rfUjxh|@x_ldmx{WjV1V-D(JCg1mX9j_b#9xghF|R$ zZch*`nW|8Ih03!{WP{`=A~%W$*cOEM+Qc#C>SLGrn@1w4YLWZ?r~}qZRN`ZskL0o|6H&Fwh%bDjA*Ni zL5IJI#5tvaz&-V{&M9-ZvTL(f&JGfA7I`^cW-fIJ2z=iJhi>>)vvs)X#&t~-pf_>s zi+0_XW*qO4FnwPY0I)v(JUL`Ims}dXM19;`c;$2U`Yi-X=QEf7#xo2~j8NzpHFL>t zmf91+^(@j(dI@)psPlD}oikd+>w{V8RDKu+eR3eA7QYebg7|gHf~>9dSF`Vwg{ThI zfF?kW-7D@=#d2c}q@Azh9SMBAtLgC_Et?lMS!Ax1uJ3`w0-^$i0no8A z-BtL=QHF%fWqKL#f(``W4FGp_BgtU6;xx+s|Fr;+Z&6aB8-|TeTNeUKaY(Ft<`1;# z8oe8SElTfl^o;gUWo@-k7oW^~-lR=IGjI3&g!bhr>+p9s+F3%#uwq_{ zt1kQY=j4UVVWDP$(So&{KRJBarog%}Ww?+!N0|ThV3zt`e6)d8;A2hQk-;nVz0{zb zF_=ns*CsLZcxZ>s=&o!gr4GDGU`e6Xf5>j=g75H|^hY%aYj&HV%mF5HGib)qSSB9h zrsU~Ist=Wj4iKqxm%>>8078YXD|Wru*N-L>s+~D)N3B*hB=%plEqF=$r-(5F^}d~% 
zngY)-ol>dlyr0(pyr4d;r#o9BsmU2%U}Az}Yt0*eE^AnOLX)oZb*2-d5;E0KiSuV- z)_I9Vcjzzyu3%QdhfNJ%nGySL`4IuGot``M!_yJBDkQEz z4iSyrfnag_{qiH?yk} z7&U53qMnn#yW#`2iBv!XF@*`MQe9SPSbhVEG_j!LqH3TiH%d!{|F%AP?$?6U{PlJ7jyq!+}%9!%+D5zv^Gh*HmOYX=UEZst%haznn4y8G3i`f#EM)0mMuyYJ9& z8*pj%TZZSFn-jXGn(E%}2m$M$HZs;?jA_41QjkpN-r%4d(-<>ihS;(k5qLRLtEC}^f8Fd8)=c`5MJHy>If8cDJ# zNmC`@u7%g@gacF%R5j@RyEK7&7D&<_`6Fkdxq6B0!V zikBSjBnEiss=Jf2Rk=F+3Fia6In?yt<3J*>5YU!`KQv^7loa!DmbdRUh}0nwVftH&D>B~q$SDvfWvoTTx}w)+Qi z)1>a?J_Q)t6b&%jMHTH{`dq04+3T;De!f8$J$AygvDwp|V5NT!^_g`Cjowa?na8Mb z{H`3kEa03r(h39j`)u4QYZ0rD^@S3dC{f>6WEgSvJQ*k40{~;GCVcCgSe?bf8qWY$ z=vOk$bI*g-s3Me1YSQ?zq%>x_a^vAUHTIv7dv8C_w^#7Ac+=wWO$`(ZIle-1NWjP& z-ujQ)ew~%3;8n(y6Z~B+T)!Nv6-XIGa<|{$Qq{_5VDC!KE7_O(x2A(MW|9J>RR3@w zVzRK{+QxPHJK$R^hE`j5X5np?Qa&hf-A*;vUrpZAW~zK$XbVn72+`O|Eic)6ePM(; zf3+7uS@%CUCWv<-zo1PxmTNfc>l)Mn%e{#41Un`lZ*p}(fU|Gw| zJ}Seu74n|S+Rqs=Hz+pGOZF8GyoYiiOThVq(ZVW8Qj|QGWF&+~;asz@U@y#pOu@%^ zVzqrPzg$%nE^2CmJm+2X`1%Bav1ygvl>A4s4S?XaKZ~Cpm*_`J8bs@$w$40jH9OHMS z+L-z+cKJ;x|7P1j>GH~+bd)?rAMqmMY9r^K?1WOGy&2m}{GGhMUU*pFu1~-FT|flR zmB!r}42b#UnNssh9T9-)a^NI=o_l;8_{vm->dqzi+^zrw8ESH-P%_QLH_e1$g--;d zFnCa)-REeITTVjk9{9*_O)QEn3p4JJXa;GT>P_-LB}>t5t(c~?jfeRr!mR@cHLA{9 zp|U8X$7$0>Gr?&#EB8e7YX%277eXTgYeeRbLn2H$jhVdfAVvgFh&UCeLMKb*Hq0`c zbP$s`UB zf^UpAH&aCOzVQD+N&2N&rZJSDEU}|9eT0KnOF*aZ{`zos(*7@0vfsbN>eo5O-g^Aw z2LF076*r--o8s%SOiU(ltL~L;(Whi$a;=sW$SS7;F4XrUvd>D0Xh1yD+#JfL)>&~F zL8hBx%>S!;QA>bR+|J70y{zD_@#$WSZ{vt5lVjlEkP%b({v6lVx&yA8fGS1VVPSPX zz8a=YgJu*4Udu1aCorlqCC+75cuAf!o*Yf;Rskgeu5>sv&o(|#cLM1E?XC@xCkkc3HDd?_u*i z=Y95gQs$CeS8BtAIJU2RkQ1f9NR(0lPYQ*!9$jgvKlQZe7G6kJ==2P&%9=u!C7+En9a8OU+lP$0FIg!#3MiWsgRwV*?6g3~eg8v!Q(Gt&#%A^w+#YnCuk_ zUue)Jl449)lEaPAcs9h8-$c-+9~j+ZFxtG232JY^0f9Yy^Xzt6S;M4$1XyPJ_NN}; z+*KnAI{(h0thaR9Mr}Jsm2rd1>cJG1o7X|+y^_VdBQS-Q7hDWq@d zZ{(b3FKy@n%qQ=xN@3VQAEBC*XHU4WgYDEyDXUP|Jdrkahvf2m>b^!G?kO+-T&DXs zr|oP*0(99}&=>i&RfW|hS1^Jp;BNv(zQDn*4^5sd76(AP3;H7p^6hmJ#Yn{KcRyS$ z#PiB|lYSI&6?c zhSox3%)-QpMR>E(JYJd>ceIroB~=!D!ep41eSaa_-jalzCx#jDgSVd}(e?fg8t&7B zSU>XGw4O%A*Z4lX2ZKx?=xRU9lpEiMlOaPw`0nu}d* z?ei5j=SEusSeytDT6}DKw&(mCv{CMD>I=!{Wrs}1A>k2mD*0-@`3{a@!%r-*&ziT9sIuXTr|U6oP&9?++rjcIOv z9R&As^f9toft+2l{-AsKKb>=sv|LT@RgkF zR@QmG;V;+>d&Z1$G;c1sH61;diy4~AOOFe7gyJ&M>JWe}R$6I~*xJ9LYk|vdIBoN* ztIu;y{NA58+km!yqaao(@E}h2B{a~iaaB;rd>{k~Xj+$Xd$Pm^FiI6Mu6Wq&&lcT}Y$9g_n$}{IDW~@i?{lC8rTCUIvT^tJE$U zib$RHF>5Sv@<{mLnvCye?a?P|F7T9wq{|*Q{FYwYMmt!FnnhiHnAeLIZ_4{S|7>_+ zN~)8*X%5QT_E(yKMK#hp{`G*$gtG41)O-)^_$%o?2kXj7-ebb$13kS6e1-LmfRYMrdw%WRSE3T;JrRz`xWZGiRraB)=5! 
zF4Gnwd4Sj5bbgFh!#769(+sttBCa;)c^5=5EzLUjs*+~Fn-Og4T>2d;x|HzLe6Uum zmeXdr_jsYAtg`a-#@)k%XG#F5kslfg8Za`O%ANq)!*K5k{zBxU6x+tK8^gI3OG(1LA;4Q%!wYpeBnqYM$N-n!mRF($osAK57vwFR^ zg~yfl8zU)u75DB};Lhe8SxD83jneA7U?J$$P_H~nVc6QcDtl?s-qr8Q&X{5Fr0XLC z4*v(zT$j3(`Z=EwOi3`so-O6LYPJecrAJjuD`%m?b@ASXMgiEaO9i*>DrX|~!W;l$ z>TXWByVV22{?u0qx=Ff1hi0#3$ueKfOPwwTVn5EYksNut3}cfustp_Mk(1}e%jG-s z+QW%)MxHcaJv9}1VPj3-PvO&aLz>;cVkypWYNMbjXN@{sSR>VElj)FB-iZ`@Qaed3 zlsz!$^JA-tVWqCO$|1X4_vUATDOmd0V&JnKy|4aMEYH_P;~X38rRh!GELFuSot1A@ z9#dv&H3cb4UeyUX#2w0nXw7x{18Ua2!jyc*Qmm|WRnCitB zGq3a}#CLglUSz0% z(G7T9Q^=~i@4P$0P{+IK$)Fc(X39IVh_m3t80xGVxM0Bc?OPB`%&6mcWN2;PF|A$q)JSJvG0(2*>>}hhT za)BwKNiOATqGS%O6E!x@5V_0@DhC)Y$Wx+x?ac99xIj@YHS44m0b940EPyGp^RNAS znpn3YoI<|{o3!UfRo27X@T&EnWB7N(Srb!p5KJ8{AYfK$*{C@S!mu0;kaem7hR`}S;q z%hYAhVyj50Ei~?9A(tMtiAesp%*pzs3+SUYCTE4t&yoj#?wfz4YVht`N;Tm1nhyGH zVI)Pa`M=a;J3}}IVql8$ii%kLJ}Vm=5J21Y@`qYzJA%IY@PvVBWn6eJ7{#1afZJ(CJ@C_ zAPTCjP-KLDJ!o<03-%$c;7D-ScNc>3;rpWHdkZ+}kO=xtO_}EyMcO`Wr;JUCjz@X1 zT4)=EzjN668O^9|-u-bBCQt3ytlJD#@9-Ea*@NmP`474{R2RN8g&<6k6Jsb1aiDSu z!EEJ{(aF#OIfv$yEM6 zOS5Iumk*$o!;vWP479beDJ(4wd%oz&0h-uf3(BQ95UiJDNhI6#7pGv3;MCsnPX95-#CL%rPA4Pxf<)ZePl)=$hQ1 zK+OZjf-RFxF?_>lX+NITCe+o{iRzJb>ClG6ob7)=i0uilrFUJR4TJ0qKg!X&qum(_KqFZ;3RZfB!tP`}WQFXA#4aaeWLv$iJKh(H|f zGfTk2>})@td1fC#ZZ!gMS2@p5;a1}_`~cD#K(SPkxSJfoRaiV~K#QG6Z2lv(YL1Zi zP$)IJ31BFKNPtitaNtP^N0poBL;aEoDmK|q_tOMucmO;?xKL2x;EpCsha7EHDh+hvSf!7h5Vc(qwG&#M_*nA1ueA(yv z;Jr(*$z8mRZ^a;pK}Id*Y6`QSYM)yuf(O7K3RQ2%qOwzse9$U{#{+(*u+hveBm@*T zlx8-2*r%r0HhX3^LuRh{3BpuZlc<(q7-g{-zu9;Y;~nb_>|F~OL0J4(^)KCc)f>`V9PEOppAO&#>c(p~nvPFSyjpU6 z?p8L=p3ggCfsZ|R$1L->ubL_)EU!XbV#y~`IP##T2mPv_bs+RDc1&!MFDw!4?L*T^FsVyTF z=Qd3022h*;7<@_4Tjr4S!ma2rvl|6NFco0sKi}P0Sy)8*NKAbRQ3`A84Wp>=WjhZi zKj}lW3^TdI3mx>(HnGWXIdq6_*~Ra9UmY*P+F>l;ir}OQ1D?7(9sIM*k_q6*=(6!E z9Md;D>MB;wvLr)-ZznJo!=Jy8to%AF%T31s0Qe57T8^q(p{6^aG3k;mfT|CQ_<^Ml z)^5OSLSD*~QJ^F*kA-tOdr?yOAL=fF?%z%AL!>~K-d8FiG>x~Xk2W6&#-LaQTNPHY z2v^efb=#sj(LyU0C;=xCwkz8&lnPYd(wG2D*rsS7ts%5_p0W4-kr?$MhZy}ON9W4- zu19i^OIB1=)Fc;(w{SJRCBQ!JMo83v;8oJb-=oh?QOVrODw`E{)9*EI>s5%t;hoW4$c?Su(M3ni;@^erIsyO~(e6<6tEq}v znN4uX!z84m=m47XK5AFa%uxK)Z`?VjSD>JKYnaWBc!A?9<~gMvaK8}+mWgsnVF#|5 z2&&o`fBZg~3ehIXFe#tUD>Nqbi``Au-tH{lQvHF{Vf@CupudnWF(n@H5tAkG)X!-0 zhJ=s}LiP)&lTn()WJr#mv69hlc!9YvrVcM`KVN_Q$$mi!93Y|zUzT!{24-LItr5S{ z#PDjb3ZHkDeCIbqP;Q4{^@4$=#UB>6Z+-7cYe%gI61?YaAA#c@Q8upsveyczVx02c z1N5M^yRv7IkECBmYtZ0GztZIXSrVw>GbGL<3Hoa8oPU#?dyicJ{JX&<&uFw64E^|T zbU^vI87BEF1McJ?u7z$jd@JcpHBKu?BOBYNLcO#vby#$#ytOpXE5;pT=tg&&c zFdowg*iZ@D&3$4qV$0m-!}7BP0YhY9Mi?l*R_gnr)VPoK?oWAa*^E+_M7XyO&>`_$_J z+t$Yk+=2fCM_}R+VQ*$wzLrR8Y)M|I4W&%#1Z-#ocwF0n8ty*h$^v;G;#424lB3&Y z?5x<=M%n3(*Faw;>ExNV&O=>&x!q#I@4BElkm%W;YX}Ac8%i7-$}Tt9mFOB0up>nU z;qy*p9dnyz%sNE z^s5Bz(QuZS{tGN{_)N5!=L^Fl+?%OJlm%fi;H2qb1Cn*~cD54sRk~-dCy;X=rKj8KbKQ^Sw!^ zFr=4G;Q7xAdPO$)BfQ#G+b^*%j3ztR^hr>XJC`Q%#@4 zQ#;)QgXrC;vGDG4P;XwY1q4^4+XaI|NVW^s+>(WSk zGN4N z1TvKa>>hXUj*LIe2qqf@BTPnz1{Ys4egajtJ-f20C)hi>cPu~+ibQ)PJ|IU1KrS*) zQ4(DNxBEuoABbED=*E!Y4?s@mU*;-^2vDb9?*yVp+@H<${Z045konm7)MRa%Ty<*i zWKYMtH1SLf)L8)sb4x~*{4CI+Aa)}IZ?tCd0}O+E^mrWNyjP9QUUo;SmOa7B1TpG_7;&(1TajSz4F@5i#X@m2x_v3+p zIE1*qt&pz^L<>`_&Bz(w(4l8&By2Q5QxG<{nHw8;;sGZb;?Fl~Nsj(5@8?9jo~K#K z{0$C;|Lr3%Tt3R3tQYQa>`mcunxcy+PP#<+?@O8sXnWWbPOKykX!|wQBl9Y;MLigK zQ|t$aihzVuIHkd%pzgn%bw|T0oCs3%F5m=@W0)Cce<^VLqq$__m-}q+9%^^anrNy) zwkBck*dVb9M=9BE#5X!oA#I~C-)zf#h?Gj59rb)&W0cm7nzcLdIHU@kury~HX5Cd) zzNjjBehh`*co+RdxxhWy?j$GX7>r7@PFnB3um5p@z8HcrOipl`bo7uv_|T^a_t!`Q zVl$e@o}(B)!Veo~BN~Fxyv}9~_anrykcB}tK!8B4RrqAL0x$*1Mtmh9!{HHi-uxRC 
z_9R|?z$wnofaQ~K)7SfR%h=hWuYYA{a6Wb4#}9Ai6@m+!?Mdo9I7X|i74wZi9f&vc zkF87(pNgd$8jsA8fsDeq%D!PXYv_FSgp1I7E&hLY=M)u=P%3% z(A9wbr*hN3$J_lDH{PufGw@V>;hyzxqk z8%&8AEH}rL2>Kk@^J!&|@Jdz2;7`9Wy3S(U@6Pl;G0|}oxyEr`^1d|vXD6?km`PYAe-3`W!siz00E>d=!sPJdCvpc1!fasTlo6u^>+MOoV5*A6D59FxGffmh&516siV#2ET ziwUcBXq1cVUUIDI*C;^h)7lRY7+!sW;qqV#YJl8W_Qt${*#&Z(X%xqtgWe_HzvXxs z3n^lzCL4}`$E6Q(OjIX^6ZhBs-gEfNEbfd6v^y<9Y?=XiGnkf2J^6bx#U(<{THt6{ zuM4eaJli&max5N(7Qg)8B<{-Sc+sSg+Ip2)5oU@h-d0vSt?=iJ;E+p(ovB31v4NFq6V79)vx7n3AU5u5zM4epjn!58X2<14s6Mlsb((l ziH#(y7R$G^XrIxc)ROUq;$R;CI1nCa_eZ|6=ps%5JdnKzlt>K`$rG06u!g9}N$nbb zRY%$lz0;i;h9aYOP+tqV6T`>H{~+Zl>WjIk1ss1Rz1|^*y6A!Sam2bC!pXbGB*mKm zN-n{Dh1Fhnx5EHSga}+cU)s<0Gt} zI#pIpxRSWWw|GbIDRrl9puUwNk`UNNS!zi?ai*Y1LZC{zc{%>ZdhD-w zYRTjfyYG`x69+!N3E<1!CJbA<;x>QZ;OFV8Rl?|t98D5-eZ$7 z3yLfW`Ffq+6VkMMw%izxgCsahM}UfsmE*7Vt7?u$O%=A-u8e6_uL`4k`FeoDpm^Ny z1g~-Bx`g%`4yy9$$(PAzM|Zbc_0kHC8V)KdubT^`gJr--`$HOn*GY)5K+?~rwM~#DT@&{u;uB8wR%-x=Y zspuzEfZV4IXOTbw`r}`MBAcIJuC<+1zUL?G)|`mJv4BX=Rn5~~xGSPb=BK&+<3W0Y zB1L#vRu-A1QC-_Cpu_ zfx?6#(D?RW#8C-&p&zGB7QQ4V2udFBayK0&h5WmQQzbAoA-{6|O#KZ5CKSc!0}5GZ zh6|I4=BNK%xVsh*)(H&yn|^p`r$&QoQHND|ub$*MQpxf?|gan}5U>0;>B<4>Ohc64s4Hj>^l`!f zpM8_#MH$KM7lHdb7SNOy&CtJyeC*wzJa49#7=v@&ZrGqD!{h&cCHvgmRQt)R;8~C9 zi;S2Uh+Wh0=;wT=YZ;8FL0`Md;s-=N9tNA%UK%vCnT8nXWFiK_38Zf9MZFC_pp*T7 zf8IAc^Ed)sIfY~St3Xwseu`1ZKv}XMT&> z-0L0=_vyb|2C;4>l^AUK1&+4v^d zCGW>saHP)GBP<0>QcTkj??-U-XF-+cezt!2CFh0KD^bR`pDF{l9$u|Ihx6O3>TYBI zJ0v52)2~j}Y&6tu{jcJZFY!y-p!J75WG18Um$)YsRS@sBz<6qFlLq>e5u<<+;UQvc zW_<0Q;w4k z1bblwk4>%F1;Y~!EnDp4*x9?8hDU}m9azzdFxN0{08N!iKSTv$s&;CxNfgFYj7#st zdzGkZ3_iz!{lKu~#~HH!k&$>K|Ak<=3HdF~Uue}+YX$9FoOEw0NGh@znqCh6O8@&k z1gWWMy}*RILsz$jcOm~rTtJ$JuH+ahcyg7G*Z}++ zUf9pxsODZPE31Bh*D4ACs?U*wvmr83=0lwQR~?uXGbU3JSxJMwEL-Spy*%E)!Nd2p zIUjrhdMu3GbvPd&13aad2RVNE00pWZE~!8?H*qP_0YuZbZZ2mQ3r^G>0zlo2rsQVi z4@jgK$Z+a6nOt;q^oJh+cRlj+dWRdz2SrUyIBV+@h1ke!yZ?JGf#?LZkdV;dh5Rhavv>)a#|2=hP zE{3ZO;3h)D!$o8OCQb&)whD1_y*Gx-<3yoKhb1B^dbMWXo}4?FPKs`SCx)5VN_99 zm!at-1JPv?qhG2EV0$wLn2JQ;fM>G~0`#iwTqMWrz)Are={gqd_@qIoIxTisyCbnU zB5*98e=V*6B3gc@p2wSmNVN)ec+wzlue-|2>udChi3vS)Hdi8hdwV5S)%pKd)|rPx zy|!`uHzOI@%5;XZQ>GJ*q8Pg)d)XpnpE!{vG}#Sf3!!i*N=X!D&m^2sGL}S4Qi-zf zTM`=Ed(U~_>%HFVdjFWeW`4_a|L*&Hf1i1t>-Qr$Aks%kOm$!#NAPmEL;DC^ze?n9gy1OAFLPDp- z&@$I7etO#DV(8l9kR%>oAfugcvRsHR`sWH{Zc#NG*FXQ{l*kkcNsA*#)z@g{sFBje z_E=e3zU=7Wxp1LmrtpocJS;E{0d;;rc%Ogl5xXkqrO`G^FUD8N&Dos#H@0(msr-e=FA`TF`INKa3X z!TPORx5(E8Bdb=;2)9PyDeAx(;*=wQHLiX5_)$t-U4#oKo6-kCl%|j)HG&%taml*5 zx$WTKkUDaN2k~87Bpu2yI+XEdo+nAAqd8f~Z}255TP>Kss;Ua1!hQ9>2)39e!_TU# zs|`4>04nKIJw+9{fB!xLC_oG!-vV>6%^CiN^l1VJ>b20-`Gk~|-9rlta%eU-I>0J)Nu*qw)7i5bf|}tk7JvTyEt-Q5S5s3n5VY47 zV_j;$2TDv*TwH5yeLeAai$?}ZxdG|eOt~bxprGncb)nB`9sOMj2M!#-1O^6@JUkBD z+uP^MX-+})bYR0QSyi7Jxo=EAhNWu*3G6J*NsX*{$twE)lRFz{`1aqL5HeltShv5o zw|789FHX_BZ-<*$10y4CFE606 zEv2NTEwzTn#>U!IUf|)9eIVLe{5PAp~t)GcpA{8QRoTRS_Om#5x$7nq6xuVkcRVsLA7 z-KNTGU;n@WiA0K|Qjytj4Nfz{^rj{RVIe}YPG5yY4j2J=^7*ZeUkHN7<6B+jr#5JJ z?`&*@C5dT@!iantHWjJ8ajlm=m7@-4?84!2i{HP$0uAj;b~-@CaA{4EfWSpXN3#JZ&D_jv@skT)@-?e_*gkpkq};WQ{{SA(DiW}O@d*ik zkja&fBlMOQDHWCdz;pHu3~YhHRKQxd{oesRIT2gRI623p8yZZEQ~0lkz2TsanI}AP z!^6WVSy`M;PEJitP4}y-ZKlbST@t6W-FTHn0U1{$cmfYBs?y8_)r%#=ve5A7xbn5 z$$CMdp{KrhLz+FiE`i7EBfB{{IZf5JxC96oFIF%o z3@N1cRQ=#V-J;FaZxI6WS*K_FYb=V?m6gv`*VbO+;1k72#AGV@+?)JZmgDZ_mCm%Z zw&se}mY~yLj^aiNi;H_aU-*qQ8*W=Yww&wh z>ytTrxNq*uin%ZX)qVa=`tNH~1vv6GI~(%$_K8zPl$F`fhveiUT$8q&zqv5C))!F7`$xv_RUSdSEzh1cEZYXGD+ySq zM->$QVt)GknKapz%h0F7)~J&X4kE)NBYsPxhF{%Hd+4AfsW>}3yAp3QnT(XIN^gGo z-q#$`Xjt*ymh&rNJ-$+s$!h@s}?<@FxZ)zd>$ublRHw*QWjPw%wUt@iH# 
ztT&=GUl(B|?9fN22zm@Bc5resKKRG@z74_n$?mly+LDZfhYt@TgJlg{3=gGcvH9`w z3;nhJr;1vil$Pp!tEZrlm6a8Q>iwlkLRPkTVC&MQOSWI+vt|>@F8cV~fN1u7_+U7~ zRJdyV~dAnWd1CdL=4gJ;dhLQpdA+{w$&*YWW5@X&U26g_Qh%)_*Wg0d-h zMj?Z*qB)3{M=^lvLLp$tqT}K?j82_O&CGO9`N+k`$AJi|1?`4G)%M)GcaIk=Qcy_9 z^znP8*4xl25Cr;krdsfl%X+eq5-)6kg2uR|KhfXay$4dc32L79RY`JMO%@a!FD)$% z0`#iN`^(_W441yXzSrlcV`2^xjM$nQWsQx+ll=`5Vn7sreO*M=3>*UpG@E-Jv(j%w z?*&+p!m=`bOUu1`cd@@|Z%@5@HyKpJa5ZexA~G^EQ_*`r_+yy=QYB7a9fVO4!#9Xf z+eJT)aqmBlLCPuX0Xq8ndDdl)7z`%JD1BepvP1Oy;wS&UzBLVID8S2CE$+^Q;^OLJ0WWu*e}mwh+Z z{6OKR!i075Raw!mEU|lj9tfW{PhB}fR_BugzXo%#?c@#`RXNK{Yab)}4kG+mEO^eC z@J;5>;?QdhH+MP{dI}pj6BeJD!C)W=IQ7)jR9`>8?iv6048|hth9NeNXuui{P@b`B1gpVJ@KW-H%8k42lBnxz|RqPLS*I6h0k z&4?04q4=%Lo-4MgK!f^J2QG**SLPB09E1K)(R>}p*8*P7Z*lMskXsH>QBeaElbbiM zi$CZ*h}Djt=Z#`bgfbWH{xJ~$of!R(Q((;=)-@mRrvC1jZwA9a5JO!Pol + +-1012x1-0.50.00.51.0x201y diff --git a/v0.20.3/img/output_4_0.svg b/v0.20.3/img/output_4_0.svg new file mode 100644 index 000000000..a4c6d0f2a --- /dev/null +++ b/v0.20.3/img/output_4_0.svg @@ -0,0 +1,3 @@ + + +-10-50510x1051015x212y diff --git a/v0.20.3/img/output_5_0.svg b/v0.20.3/img/output_5_0.svg new file mode 100644 index 000000000..37f01541f --- /dev/null +++ b/v0.20.3/img/output_5_0.svg @@ -0,0 +1,3 @@ + + +-10-50510x1051015x312y diff --git a/v0.20.3/img/output_8_0.svg b/v0.20.3/img/output_8_0.svg new file mode 100644 index 000000000..ffc6311a6 --- /dev/null +++ b/v0.20.3/img/output_8_0.svg @@ -0,0 +1,3 @@ + + +-1.0-0.50.00.51.0x1-1.0-0.50.00.51.0x201y diff --git a/v0.20.3/img/random_search_tuning_plot.png b/v0.20.3/img/random_search_tuning_plot.png new file mode 100644 index 0000000000000000000000000000000000000000..46b005310f466c173c6d9da82cc0d68902ede303 GIT binary patch literal 44565 zcmd43byQV-*EM`V6huK#Bm|^8L^`CAPC<}vq>+#=rKJSvkOmP1k!}QOK}tHLySu-2 zuIG8j_};(1f4_a-V=yi`?6ZHh)?9PWwL_E@rLZs`VImL+EE#Eu7YGC@EdqgZfPo7C z68OXZ3;eicBqt?-xJLdjy*}>~0zrCLiMku`bA>g10e;}zr{{wo{%Eq;@Pp!AH2h9P`v3b^ z?9(XYw2X|F*H@P&?AOMQ)jFu}nvQ!alD{m9W)jGVxFyIh)x2E~yhql1%yI*Pk!lsj zB_$usD48WAExqGay4hyH)?vaPMb8+NVa=V~W2Ty~R_EtQmmazCw?Lz_ zv(sgJdht9C1(CWzA0(D>mx%q>-@mby9kXvR@7#H*rbdq^i(aXts!BygH8|_*DW28; zMU62;qFJDWhZ#S#)8vENOZ-WxxVSjl;2u95i~UauvR=?u708JcP*?t=%PK7D%N6vNGMu}8#kc-6o>Qd(3K z{nOwdJ_g*&iY=Dp-n~fldyo6fIYsk+_V#roeB9kQcW_QgQYVXAQ@V*jq!Y;t4yUB1 z7V9?(S&y=jk&y`s3d+cIw&giFIV~(M-o)RIKRUO2yu|xqSh;1@Dtu2hr3RW1MobD$yXiVJrKu|WmZ_=K>uN1yOUtu8nT50o=Ujiim2ck;HTq}- z1O#T=;qQD&bE8W=PxtiyJ~TEqZdkIoK}D4{AsW9sE&#u*XjXH$b(fIMpa~%=g@Qoz zm6sbHybBBrjERZa*s#V9$|^37oZrQQkM?3%b>7CpI^5l5U}d$ne#*gd=kDF1kr4uU zHuxG{tX#zbdwcuOpFcl(^oW2}$Mtk?vGnzyqm40DHMNC>1vOaJm04^Pu<QD=gnX{Ty1mlj{`^$(G_QXrJdJ17#l*lcD0x*E z*VWb48cN`Eabkb4(*HgT9TsD~$%*ClY;(Zz_RR6_?&*A_@Y&(IR963wA3s){&W<)G zu&}Uhl9t@Gu&^K#^6VQJVCUj`n9mmtb98~B+L`epe|C0OR8-W^(2z{nTTM+ZMbP7@ zP_wk~+qavk+Zu3QB#McYjOL}sb0?ec)g&=Sx!5t#)BlTwFBd6*trt$r!^pt!?Nv#0 zceh;MJGSw%9^tb)rQVK=l&ZM#pB#**NYWNFMp$Q4?;sFOPYi3~9*~miDl3z((Azj% zE^VHvcn8Zr$DlwhG5oEMd)!>D!#Z6J!@szn8*OZ3b9uC>Eh=U4>8wVJn)c8uk+RpU z^pOV-d;mu@LAEy#ix?9V^Xgyyi`tI83g)V7nH0}I{>*VY_jy6K%;JXlnhKzh# z|6OKgX73dTY1!G?3pBnp3&7by6td}dYu35$=EzZwT%TN?A3w3`pTEGY`D0QtqPHme6@Q_nrP!l)U*Jog4WMpl7DIDk>@% z8S5j(JRY(d8U*ptS-E5DR+^7r44hTyyriaKx_1b(B|((?zzUAV^40s=+S+ugtDz4! 
zrw4d^q)gLr#J*I>?|t}F$2VO?4I9uGH#ds}m(wM}MP_xxMu0hZZs2!K*VsP6MZFEj z|HQ29oUD{zzkZz`Z}YHvU!Ij078cq^wHn!smoc!gDEO{wl^ITLb3N1_)Nd^O#w_go zhAhk(o|v)-{;sL1DItMSLqlVFuFh@$i<+gfGL9YckEd$>{`w`MuiyB03IfXF@r;j> zk`fOa+i1N+*Lu(E0iKF)8lBH0xl@)6?E(QjMsQ7q09W`ZTdI)6|y|9#v3k$zmTgAo1m|0k6 z>OJzlem$Gdg}wCp&u1123j5u;=U3PqS{fQs2nsw-`tgLo34Oq8tYyL3lj7*M>i<_=?%u7v8t*NQ0s^V=!yn0hTnVZGXp+a6~CMG6t{!8PF4y8YS zoE@wVHlHCwRDS;Hs;UHK2@K5Z{K&=%c~GmY@)u4{Pft%y$}Y~xY6Y%4e*S#dX##1W zrnVLx9sN_M38WMMK3EA3cJ`&-)NTW|=>{(=3yTn(4-pX&VPOaR`&QkxaI3!CWoiyF zvB*oH;T~py!|q(GW~tdX&C(eqol3fV}+QQ zm>3vjw6$yX8@(aW=jZ1yE-v==_P%)WLag&Pq=&(C)6S@v7;6&6&8b?KqoX4V3W}|* zt-7l$Cw{Mq3Y$v18E!TK+oBgOjqHBR{eqg3-0A37m{j_!@zXb$tfbXd?+*jnf3`?s7S zdmkJeOqN>_9K3@0@x>k17VY`{J2NvgLEcPH@3X(Ze_-IPyIz1@(iO(J&kzA({X;|9 z%Jj;1aL2NT!wlmPeQA|G!*+qwl9-SXjN=Y_2#%+OCF(=?Oy*tH&d#2m@{$r6S=l5= z+1c6D#r=JKY}C}$+}!FPci}Tmq>yPK^23;;KYfBJo92C*6dgU_i*f_j5RC}VuE@x{ z`~`?IZG(fKTs#L47$d%ZU5iweMMh8IfcypJe6>g6N5&p{dei@}e;pkk*K6>s&dfyR z;_JG7`*vqXM-%%`*fgww6BaoxE zYEpB6c(QwO>ZsQV>%V(^+z5g4hiNyfZVmN{C)}&%u6*VD+?pH8fI4Um_!cka6_t zcsV&a>FA{TEZ{Rvh6##=PZQ-~bpep=&bPydxg*^CaFTfKGXX_4&B1t9_yH+sSK11? z?V(XP!S**(T3uTc>x5tV-u%Dw1rjhl`mU%4w?;TAPvn}5W4u&Xcc`9hdkz=|GoX1y z8u?_jg;IPxcGG3wzkiI5examUkX1A=`1g@Qd~2!}riJ#&lkN3&kHxOoot+(G9^0nxNTk6A z;1V{wj*bon8fv1vElHrk&=-}LZ{wOggHPz9=SEY}(zcnE;)($_prN60;!ho6 zrJ}k~`W^<<1W2oYWJF0uMik?4dq!VO%;?W&K`t(?dyk(%-cKwlD)N1R95^K)4HuW= z=4LVeR8|It)8#ZD+sP^wb@k!DfB*LN^&uhF|33IucS&h!Xh_JcpkBRud1~rZeeu5lj=hnT5MXL zt@^(GFnb6C|5f_e<7F0px3DaAOiEq>Y~*!Z`V9xXBvMFOM@K%Z-~D)NDk26xTZ53N zX&**j6`J#KsM^AaN-YL+)%AO4-#~cvINs7FA|f(1U95H429$W|)T5Bh4cqw;fT^vm z?W_05y&*9!?Dj4v$Fx|lzDU1udMydC=$|y4aqNnF| z+0wDJWQ?00aC37rZjV?L+!`q++b$X~n}4@8T|d#=TlDu^$=C+$Gl*z_!S!WjG38F; zY$liVjrl@XD-tVTIWNo#BQFBK$dO*u5*z62BmVew-8`T0j_YFuW%aA8sj zl=$x0xVWWrv)0zuSwaE?iiJ-k*TyZZM+cBJAUjg|L&kvrl%T#^eiZ%f8w4UVh(cn) zig~YJzg}KmuJmxR`C%|TL|}FGb<;Qij|(?h220aw`=7r@Jqp5?HDBSMLrgUb-Z`)p1%G}TWnTlCjJvW<}$^B$B(xX`Kl+JUwwSY zMYtGHAs(bBB*^n3gEEmV35 zTvqtSUVZ7JjEsyu>PLHf_Z@^+4;za$8-6WsPE-Oafo%A2sR#v;uKm+JW`A`sFD&dX zoO)7H(p!E}pFhJsg^(M*Yf|Hfyu?c-eSEITMkXt4h})z+sTmk7%+2Y6Lb!k^EpiMA zhL@k;cA{bvmP1cZudbU7o+FvZ4q%Jt#mQ5hQ`oo=O|?Z`HpdrNSNQ-Ww{>3u6oA8* zJGlBvk_cL!5)M;NaKnFe(3ii!owjc@$vE9zkeS#jmZx-TPnyh zeWqpKtfi%;2?+@?=#dz@=v&U#4G4fyQBlvIKM!YtX%!L@8Xq5rC;<08+gl9%_dnrN zi;)k42RyaH>#p6SBWw(ej~_qw{ry}2c71rIKTDH2?w>d^&SJs`Axeq|-nAgv1*FWv z$vM>B%>-)$f6K)p6s=eC$O7-2n>{TcdtpO za;B`SUV?xCC@0hfA10ZP>38=)Fwx&1B_1^_~Gb93vSqrOYe&mTLGRpO~M$=MEI z8VSB3iDM<#`CRDOHUKu-ERxZYkd~v#17rlV{bc*8-%nQnEHLd!y+uch-SKAo9?ses z85smEWFt22LBFzb#a1?TD?3&99oZ#bYbilM*Oj9iekLe5KR+MOqK%J>J3BwGs;FDvY01@20!By8q2gqaJYFSZH0ZRjE6jgDQFU*;cHP-c}ef3P7Fr8ypBjKY*b4XTFLT)|l zUP2=Bx*WZlXHJ_MBeC0VO7F||YO%szHY=3~U0zhHxjadl4TWpIbmP z`|DRm8k*K^wbXkd5+CY3x*@{a%U4Ag4vmMtZVw~!bD@WIVM$o(He@4QAO40s1dvfr ziy^3qN^RgJ$mjmuiF%#pB~8T3A?ZCDGj@kn5AzqYb`Hh|rU&Z#F{lsWs;{rFs&XC7 zyLIm|bAmzY?~n&{9HX*_a_@1)Nc`7+(>>9%UFLIpQR>4AJFOS;Eg)KjME1pvu~MJ| z+yvdPeoX(GKX*_P{2`kL>#V7zHAjFP{2f3-%=mIZNcO4lb3NzNlqj|xZ_XJJa}h!y z%rOU5B5G^TVK-IUObnNO;xzAaoU8g6P+-}qruH!~8xMo}>$BMOf`U*N_+lK21lf*- z&QA~V@Mz@|aBy+q@I(zFGXU&XOehgB=cw`Cl5`$Ql$)*;T|1gcJd_BjRts$k+JD7r_}F6)hpJHHgnz z|JXMACc-!OpV18;c?V(zFHemAs~HQH)e6F(+`POXtN+&S6?z?1{ea}GA!ivHQdHXS z$og~JjGI_lF(p{1hWPuV#j6Kgu}iqOs3VsB5n`pJu}ZYv7eF2Lt=$;dUugt)mq z)0oERm=228kr5x5OoJQF_e1enhGy}kZ&qa`?E9bBW@Ga|o^I3- z6GPqBfxj06Fll468A&Eg=?_exwyy3aR0;mdkivn)fok<8@%K-_@q|42%wkW-OMiLM zQA3as|EFj7bNK%r7TE|$ORruLhPJMamF5KnwFB_b)zyVk6EcLs3d&>FcmdPllzEMo z7kIbY``xQNdV<5k{?DPALeEu9<%cXfeOd-(-s$=IH9$G>Ane1#!@tAB_-~qfXCaqG 
zPA?5#MFmUa(TdTp2P}uJwx&5U;9*Yp!&Xs)tpbdd*SYt4FbxwEa#M;*uEp32yXsg8 zqDNin8=Q^3$&MKq7zhgwpC~C$NJ!{tk2KBBnVOt5;I%}E#J@WP2&@bbZE3ldr<4ZR zV8Z=&P>&hV0n3Yvny+3#QoX7Y^Zf6Egt|RlPbZ(yw@N!meIFyZ`_zs&zK?f7OWXB{ z&V7d`(cB%Wzv(Adgf}`GbREsE+u}b$f_hSTV>(61OABykSC=J}XAr+IGN9fG2@QpM ztMW|CzYp*;p#9OlK46F5Lw>ogY6ePeaA06Yth96=%EY9%*xT{bl0j9`5YS-?T&Lf%U3?d)r#`fAHnwzXB5nVr)t`%B-~EL=TFt z>w9}K=P%B*kO1AdEo|1&2;vUE{7eXh|3@5Vzo8h!6{EhtI>FSraCLdUAK|KnPefz^ zrTLF>fcpb%BsS$Y04DuFy9-Q(C5AGskVPg{Bzi$H7w4NK3&^xD*v%PMc2sqtv|&D#aiW-ki{VcH#9T=>NvGfVnKaJ4s{|ElU&Y=)AK_q>if@U z&xeMGokNZfsI<{>!*p*SSRTxdoet7T%zA6jT?0iYfU=02e~ckQ;FrHpRa^T+tE>Zb zgR!wh4Hh8#KRjdtqOU!icx0^`&=RmPu94L`wkj%7H8p3E1px2if=nwZC53+T=H+fX znQ##ltxG@|t*w2aaZXmI$HT1-GFv`kndZoY^#GlVkN>f%ipoy`TrrGYAs5_Jx;flF zsbSYOLPV1bkRl)y0R6>bIYbY_5>SC$R>LN*UlR}zkO{i0iHm3bjR(H%{PZ+7KAwq@ zvBCZDL(A-|SFgzZKYaK=csDRE4nMia_Cnw_dm^B~SB#nj!;L;Z0LK_(G4*`__<>|a z5hV?r71T&oz#^HNn%dj%Lb)FIL|@407Y6eaGLU?-Wj;wpknmX!eF5(9`fxlI=(=$WY(y%%{?X zyU0dMQ_KB#&HBUreM94972mvhQ_w;IYxSi>8>uk$y^oXag$Gqi6Y8lA*w<(aCq}11 z7T)+Qu_Q1J*g=B;*kK1NC3AoCsvJZzG|yOcb7smc27w0$Iu$rM!0$?Qbb0^&8C3BQ zL!3M>8+iJ=smWluHx&*61R7vO5`?_mfo6cI8+Em!gS!BTZ|I=D-`U^)l%9TNFi*+b z+Z)s$D78{;k#r^E#Ij}c37~5LYA{k}dU_Dl5R-(!5~M3=J>et4rlX}b1F;E|F`#Jw z_V>FTtf*L8eub!uvj;a=Vb@wKiAzrQSR2Zhi)RJw0m1b%|69vu|Jyye>d29>Swb%5 zb6Duh5O;QV1_p9)Xh;GBDtK<~_R-N-5`p|sZII7t5$y!F%t`=a$a5x6j{aV7XxgE^e=`a24t=C{&Lk9LSebvbpx?7q*W8As-yGFxb3dL zvb#Hfh=lw8lES>IHMfAEpzezo_iOI$jC@haYKz3EKdgLNA0iNnwh~ z^L6IWA*{@QUVzyf8ZCsl4cDCc8XVqfM<~V55Wb4vQcK@|2nh)%6>x?v0zm%)3yn3E zZx=vlN4pC9z}NYE688w%5bbO*7ycKz!QIc%P>Y}cIH`ZP|AvGL1;Jk&CyS&_0UF_m z3Ak(lKcipmkn{b;TNf(HFv4f9Y_YPfUgQFfj*fzFa~9T_2#bhup1(iX%T%-doh=iz zt8f#sk_}pGp?;%YAD^!|<^2#`Lmu^ShqZ^54?p#@Ih_RiWRih;S!QPiDFLMVf-hgn zN{HtE{cD=}flRk~|P0&dwnG508wb0Vy0B8Ymo?N3uXVm2t;oEI_fv`mO4S19^{4BWH9 z+>91A2cQ`LZEgy}peT1I&VUYpATrY54}b#4IP?EXsW1s<*3YZin8`X1XS)cf2xzke7Rf|XK+2cp`mD@XZ zGtK~Ku1Bjh|NQ!q7L|DXz7=xZ=4dd`*VPqxUan_0i7HP>U7YI*RL?14$$<)c-QlqEv43Go(eDz9WcA;F@Bk1P9ImC=Sqhj=Qf}MH;JYMIQR|h9qVpf8zXOdlS!JK4 zFaeN3VRC|VJWVE8S`y9UVzi1qP27MCjAB-UbL@P;N* zH0)e0%WpM$rHwbJL9T>p+4NvRq~B?B*N-SIb%am?t0?SsCPDiN9h>9}lqG#8=~TC{ zNfP!B9w*i9YnK~v^q*sVvOlt!Qk-e<3TY)$(5CKUKa@<`XQBf_%A$qPN`kU~bd<0+ z$O)61oBOBI{eD~*P9q929jFU&Wsuy+x-&B~^ANT2rh>3Dnbj||`h1Y%#klxtX7|38il1xue&$op?Fz4;FMMHgO$^IduFz9odWU#M%d+5Uk z)6y}UDeM5T)A{4r={xc1VT2N~lnbqYbSoqv)ZrweAaoz+Myu-UZ?CMJKyd(a>0Zsf zUu|u`SQi0`0t^Sj$GBm~M~WfAcESgj{T;0s6hq2#VGBqZ$3v z075r6%(1$my~26N`q!dtdul45n~}hpgpLl0%0!F?#C>T|KSQp|D?bddD)~Btl4ijNl#(jo0?#buet;nxVa|W~2R9OyY|=-X9H%Xfe`RB1G9nyYCyw;jL|va0 zqQ0DNlWHMT^zZ{KVL`9r%*+?)GeVTV;oSZGTYh-xZL{<5XW>0L<(CxEs^SB&Di&wLviya8$ej2&zPBASkx8p)s065`wjT$X$>EVEfb6KF^EY?P_A z-7F?NpI%-|MNRGC>eAy;c%TaoLWJ0Wze}^ks5O>J%DYTO$0gvIqgwc+U783l@;*iydaPN~PsLKyiRxj87HsZOxRDb?6PJ!4<<69FC z7$U&N0-5~;^s`WcCwir3Sj7P%+KvDsPt#{P==*#Y>=+;A`#JJ@9j$jL~#7j$(CujT0^*YMREqP4;c1xS0501U;Rj$;k+d!oyBCez+!Pdgpv-}dOsklZ5Epkej|oEh(az5IvNGh>{Y0nq zy%B|68Ior&J-x9!U+{uE-B$6;ntMR}g5s>Lr3ETD8)29k z(`2L1OiRl=WJbsiuo=BUCn26j!$|;sxF`_Ul3mxS{;Q#^yX2iG7DRqOp}4R*?gm(y z|3eTb>Y!dmk*o3hmnmX21cvH5%`(7TFtG^O&q=8CR|;=Hy|0+e-2(&%FYhZj41ZZb z#s*MG%J0+xgx8-xN?Ka{%*=&j{O{hq8_ZSU;NsH#VG>eaeh8^%V%H2Y0}@r*$F#7j zls5d%W)tzttIbh0AT*v#y)%j)5LEm7#X%4oO4*2TsA+>#HQs&TAlCk35gDkJRBo(_ zK+Ia@Ec^zQ;$&wQK%j1!Ig;u&GMWPng?0C?n7BCnV^V6Wjg{5Jn$W=DAXpYaQ0mbx z`|-p5{AjaSJ-6R{b#)a`f{h!2h%#)(1CRU<=GXhcrHaniI ziTD4cJWyiUujDA;v9@lH`5{5MyK`0lLaIG|`E}npM#;w+(;2kOazh(|J#Z8j1CMXHfije??2FeIIX=!qQ2*ZJ3LWCk5 zX1v^j46ug6auj&DF8{7p316L1K?%jkxTZ041rP*8V9@kjL2f&l(2VF_l$Dd)xA!BJ zpp25pUXYfPOA~(BgsgLEw|&F}>O77k(kGKA4jzSBUjJd>nK-uF0;y(h?#2JO8ylWs 
[binary image data omitted]
literal 0
HcmV?d00001

diff --git a/v0.20.3/img/scitypes.png b/v0.20.3/img/scitypes.png
new file mode 100644
index 0000000000000000000000000000000000000000..fb1ed2c0529b5c1c2752d08c0a11f8c2a0fbe030
GIT binary patch
literal 64286
[binary image data omitted]
zikT;$C)1al;vY-Z5G}p~%nXuQ91b{Uf6UVOS8i5}P^2f3)kE&?mJ;AZA2=ElsFod#xx;HDfrs{)>#N*H6-%(gP+ zxoqfQh4*xQM$KhYKA%ve-mFxyqrj0A&f#%k*5yJY+h6J^qTYPEmu*QdE^?_IXFfs; z`B`y>Pw)^vkH%wl?I*RavtYYDVvpIWS=?!(+@w; zORb{OPZnBHn;{}7IUs+yDAP#a%rDx3l~4LW`nbBIA7NEI_q`m;V8@!iiz6W14}TN+#vqO}A#dy&v%7*r7TL*x z23AYq0ewES6UQ@-RaO~8ad{J&rf-*l7ssQx^bOxEzpSdFOYFYUr^8=%A>a+6`Odle zUi)lh)ignuaS>7u7rM<)>lrJc)XQi4$%iw&)Qz?skXx32*k|5||BZBuHh{m_8#3}k z_OS`jGfp^ooCmMliPdX%kjY0ITX%RjKKeP1+8|$?+2@k9CW&!WkBeGebj;b^#dNZDJr zZjD`bY&OO)bd~Xfqe|FQt{1Re+J;8*V1uEPTpXvbz=4+&I$GF@qlM5x8-fqLqRu$# z18(@njuY?X#lH3KX8_iIfbkgTHYS7@=qq|l{mc*2Xg|vKPKWb>nzA$@td-L26z02u=xl zj;dyVvm)EPbSk4$d`7`8OgFfkR;MrHX#$sF$g56jzdC1}i|`!F+WIuxt&+s&YO-lr z>_L2opj|lNJ=4Eu=S*%g>r}7po8<>Q8mKB18E=N;85y^$r#ZfpY-f1Gb_xya5I7k| zoxppIkIqPg%bnu)i5hSY^c{_-YH)%F)4A<>WlA^r+z#_?-;B#;4yw=2xS=z@=VzDO zEzIzp>P-F&$2;R*hIhI{U~oAa{vN%5D znCn@2VzAdLJztghZ(7`&IfLAA!rP^vTQW)N7h(+4*M`1Z@}ujHhIy9xS(ib>^b%%# z*`})F$Z(`(8RWg5PTX8B=RsccIFa*aIAj)mo{!7;y74f>Q66P?t9OP6t`nUZu5Q=m zv~G2*!}*=lITG?hlbxzfoYoFsw>0EIi_C=<)T?@TgRkMA1Xf2aZxTJvo|!)Nrhfh8 z>8*q|(4J+9_*e~Mw#Tz7SIq*+mf@JSF(U&8PnsQh?VU!L;|%MNG;lIg!}0FCes}(M zq07%{&fl$U(l~cVewQhskLMsnKRGSa=X8g_fe!~VNZDwA( z?MScpNp%?B+xgsgfyWM5_t~Z^%RoC^j>~Z1%kl!9e$Q|&!#L_8>Bi;fx=cHd@fs%~ z<6ee$o0x~>1+Gb-?Yyjdvy;b8N_M8hq3I@cIT@a3J`K-`uRmj2Pw(UXF|jxLptIjB z&6KLdo@Fk}V7+j+S-UX9X1PJOaBf@YwH!K)Fw0<0H;nVvL(=VX*~qJHHJ6{^cqX3l z1AR6Op>-cRc&~foap-b^&pgce+trD(yVbjn*K|Tpb{Q_)b-<8jSFcBY&v4EM&FtRu z6;d5Bi+dNwMVRxOCez|~;J|}p2bB{n>@HL9I!^+q%MZA9?28HYSU)Sn`#901kcCKs za5I=nXQL&TXE-y=c`{79yv~>5^0V`p;eIzv{oV2PupL~tvPpAYoEIUkJEwJ`&+z7l z;r#A&!a6;Mbr~6UF`iKbtvYkY?J<+T#i-|FTm~x_10f;H53@$`I393-yJ>B;^BKQ`#9ta|PW|iD4)CE^$ z2W%$?PRiv%J;%W8TvjK1q#K4je%`!!b4zz7ji!pPBDz1sI8m0ls-KC*l2Cdxqg!2@ zz;w&sDQ%(fb}EnfUdn6=+Z3+X^1Ice*EB0^*&!?m+3U3q2NHZQGSReNO>j1Wp}eMW zO=&aSpz)R+?3_lq%U{AFM*#N!hVTLrJm_{XnS;hVNPNBE8Z<2U%D#1=y5)Y`{O>0$ z>6*P9+z+B9m`8WRqHj2Sb`$jY^hH&%)J?wFTOt(Gh$BWzy}-TXOFPBi9^=&{ETXOka$ zGKLf^6bIbL4if($aF#~+gzkMThY9;E_WFM`bT?tqCh@Zf7``l94h%x~GIIuiZ@QoJ zkJAGvIM-wNJmf$l4#;?0b^}hMiZ33}2xn!7ZpZ;~pqkL@*kbiO%NmM`r{#sf+)30xhp$aIpv9Pqr%m9OFpTCI)5BHQCEP&EoU|!E|G8Y9{-a{| z06zu1T-h|N*MY}@82>a3VoUTl(cqT9+;SO654G?Bq&xufA`)v>VZzhE3U42K+81{?^Rf{*yf^)=DZ z!t(HV#WE0t1Fj0?mwaptA8_<9VS;T`j+K)+Pdt_|$xE9w$tpbE|I=_wo9uSft<#@P zty2QuWw)HPgB!*TIWQ0o_y|$i7cFa<)2Dzp`j3Q{&nD9M;NWv1YLOJM+@Yz-U11Nv;Ff@V(-)83NZoZu@C)Hlo(>_&ebcCPns zxCWh8r)ALi=Y!9*^@4*uIKp!4t##XNx3wg9KYPtnL}$oo@9@(0tmC zyf}yb($F-YX>AHW2soEHh&pCmO`qGpz5bG~u<>8GFh^rt_4N`nvhM})jub)c{^p0vg; z=d0(}yOQI~W?u?m+*&OuO5Az0tZ1hmhJ|q82@^dzA4%<8;OoQ_a8Vy;q+XPn%XM0( z(6Dt}HTXI4x?J+5I@ZuMdo@kQMIAFvrSC@%c@li^rOcyXq|gx**}YRMF1ze9mfHXM z!3Q5)amy{Yc+wkrhd=Y?0FA(T$erK%*0;WN#u;av@RFCjgtlNMd(`blDP`O67;O~~ zdQ>Q|_WDU1=X_*uyw2LFX9?uFabwE~?yy83G z`OZTMuhL58$j^TEv;GL9pvTrhH!?AK?X}l_K?c$It6%-<5!H2;Ot|ieRM&UEKF~1c zAAKurC+9po&qS1c1rgbm@WF3C=k(e$ctbX8@tt0y>~@Kw*>tr_ZW~8EanPBc+X6R? 
zH19IF`A~*YsN)ixdF*;bU!2k_xhAiCy=~g^2EMp%8CSb!(^Rj6-zn2Nbj1}{ObEx7 z`bXl)>#x7ww6{ZhK*q_A@)&lx#xwf5?}GjWzO|%rmR2VgcGj5)X{WjyIreG?+TK z%6BS2#_p`Yx`)x^C$u-7ru=YYbt`Y3ZvRtuK7x3%j(sb+%TBZ}`bie)1O;a>uNKhA)HWz~w*v z=}%A7YGSlQ`6a4Bm0>xmWQX?V6OYIW?O%7+6aM34_3G7mU;;R2sBkJiKZ7#E@s6v* z%*6Kg3*HfAmE(PvUV7=&yWjop|M~Klzx-+$=YLb$FMC6`w4lZN7(7JPKmPECKm6|} zope$QdB`$houwV!C!)V?{*xtmm1{`5(;tI_JoH@-GhDl8@^tcbc;M!8luciS&(Cw^ z8$kM8#PuYnZUcwr5p+^7+KxJQbL90HX`W=hfTy9OZgbTSRHMiOdZbB4Ys;1`|4Xv= zts8E*fuk3<%G)ppcDXq2a4~ToKl98pzps8{{h@~*8hzY+DoER*i?q9CfVQ)J(xmOk zmt`bBn>Oo|<;(Kn^tr;E&#>*vaaxzKiG0lxc*{4upued(%WTe*pDS?Vv944JwFh`_ z{!pghGg#&)uygSOc-ykHH;%fvoe8@**Kw92yFaFE`tKTVoCjW1{P5VlX3d%y$MwnP zu-VQ!>#UE-X1iWC+rL-9T?nctK^RK^ck)}c%JssNPd<6mvBw^}asB%BtbQ^w*~2VW z!3YS032t0M24@kf_L}C8p`d4!ZF+S$9NpjoBJn6q{Be|+H*)a72d~mf2e|o9j{^LP zrx0bInYqmbGxDeWagkAYM|8jBQT6}J8M}KG?tbW@hj!^%(Yy9)wUW5K`o{O(KmYm9dw>7?-%p)# z$|>u$%X7H!9rlDLJYj?U8t8;}(F1QIb@Z>6{TQID+E6H zMcwc;I1LXkz=2@vc~oDm*sE>khgGMCls+cf50YOczgm7Jc$Gie1*L!%c#0=pWv|e{ zLwmu?5j~G0FdmHHslX~Fp$-T)1A>KIxNzVE{}^fT1%7EZ`>1Fi5nnePdE}7?sqMyu zb4oa)ezc9;sOmGKJd>)=BjU|k!B5|D#~t_U2nv&me>(BR6YsqF=9^au$Ik_W0Y@9n zG_W3qZXg^mPr3Yimh}2Rzw(u@+$5PkQnEz5@-NZT=&Ra3yxMOXnNb^OA97Sq3Mw51 z2H8S}q({gMZ9et8-~H~PyY9N{Pgh@k^#j_miZh)mpSGo+ak%y^Z+Xj;pZ)A-Kk2DY zed-3udLdaFJ>rNX0-rlb%O$c&USt}*LWT*^BV@YlV~|-KI6?+_M)q-nvwTJ9ck<69 zk1&vrO80l)e*5hYTyVh!cPakL=Rg1Xx7~EpO>3lEx9NVvzXL08TzWj9oN0$C|3eE* z|DfZGM;~|GaYw0r$E4rf;n*#|OKth6bn~xjgDJJq5yu>J%*LaSK6?GywQGZyYI}^y zLTytBW>o#wPTgU7@K?Y3745!TI5*N4>vAl8#k!3fH?E{FL|)`ZXjApQ(`vg(g;T;Y zskkZO#Pd8!9PeaO+k`Kl_1gLWKqMDqqHm@AD%ESH{0cpN-im%qUe#*>mi@Cg2_RJ*QVjFG1*&^E0Q zopasd4?r0M1;3vGP5sEHakQmf>WSa}_O}n5ciwq-3(x;~*0Y{x1go_{U>Cwa@E z9+#D~J@qox>V0o}+uM#k{`lkngLG#-?NXPaTyya+n65TM1`6o}cEyDJ9`&z##MfQY zOYVT|QCqK7U%5d#wf3;X4qGEnd&1Lb`!fF&IOO?n4^FFu?V&b`uzZin3&N;!!zL@A z83)E>Z>-+1VZ$oqPZO!9!0o{XYz@Lssrs$OfvpVg`xed>td zvFE@EjXY07hv0&jf)m;q9;iO(|CsnUrTdgST@uC zsQhZiO680HzY~8))lMVy$&7Q74`he_b4>Nvt+DvubmZxel8v$FJm)z_W6#P(8;Jf< z(b5wCV*D1Jqv|In?!5EPKZ>7+$VOYEas0juFTC(h)$4!TW|Q3A5`s3XWtzQ&a=ham z?>JmY|6W-($ip-wi_o6G8OA$}bf#H`qkL{cVK@@nbLrPE570b^z$ygiLGo)$Pp3p$ z2&F!4H0L!@1|4dxD-1m528@%T5k?rakbu9MX=+KOWhl-H|V3 z<6q@>$fNt{#csk)n>LX~Cs>pNRXnQ`9Fu+-7xH?f{L%88@$S|Sn z0&MPgoY(cS%#z2?;IKDNMYXoe|4qjABkJI%wF(QJzmvaL9)6f#dORRq$i(^QpTFk4 z?|tv{RD;9i%|~<=J`>_;OB{U_`VF6GbMkDIhv)w$@4kz^gEqqf>pX5R+S+&t;bkWe z9Zu6@xXk~x2;6ZORVnb2O3vz`r?apq^si9^c^e6y#n_P}NOjf*-X&+y2+a)0ab z3D~;Nxbib)XIf~RL(2TLiS?0w+n%~YU*?PJ?>O*M7uuaR!&_fHw(t%-&h+w59sjTV zpJd4F(qh8iTW`H}oeZFR757{DHUK8`L$Bd7 zUo!vAC+Eqb^SDmH!UO6L4$5Y{qFzCp#!A`!kW$qpmsQtA#$oD5JY(>mNca@RKG|!zo7{)dN@wP$|d!i6rTxWb&zxs7Aqpq1_I&)8+8%=6U1w_yz;*L?wioc z42p$d6AsdCkOV*B(EYZ=dQTvY}14*Z2l?i!uJ33TTSN}3@StmrcXlv;T zJCL!my<^9YQLU^@$?uVzKPuUX9+PSUBe`#B1#ViaLM^S(93+`tD|uW+o}d5x=hKqI zQLVbL)7cWO?52dAq9c@v?tlv!wme&gMJGw&B>MDSk>7EY@A~rWc*jwe^lwb7zoS~! zLRK-<9*}2XTTU)y;is_9OxZ<=Tt}7Xv#B28^9y*!zc4pG!^jC zPH>^8z)`lwzftNS&wtJARU7Y?9!)6^Z5%s~DlaA+nSZn|vcg!b-3E4Yqg-rb`eWgp zRNPMOE>EbhSR=Yu({4(e)b4%jX-|9Fn8$D77||~Fm}q{KMKskl0l0+S+QVQo4kqRaLY0Dy_D3 zsJ-{7Dy6>H`~CU+KEFTQ+d)3-F?nP}2MFnFI{#i^UTpl=LNFFoOUu4+0Jw1e^lp zfmlyJZ$B^3|K5W^VNf}!njBQs9;z&%qzi=t2Sqh`6*X1o|2~iQ@e2B%hAglVegOe! 
z37DaZJQQe3$^z}-7ev6~eI=9-z_m#b!7Bjx1&V>Al^t+E0e^~6cSU7)Woh74H#j)J z%h3yC=0`YdMM+IwNfjuTGBdWfFqeSo0p|gJ{$9Y3k(Y-*_Nya;HY|8_qd z?x|}R2E3gZj&*l{ncHKOegE6-FfSb553t#PEcm}~KC4V11$+Is)YB`>&ja`jn1qr6 zV0lkJv=0s)2t4BVKgK151zUysAl0yNIVB5c3lF^SnPJWv2|@$k|DUF<0sG1+{ z_|YFhu8;VeFN-mh&E`&aC^YKh6q0cMJGF33#=8^0E;lNP*qb7Au1z?eriU( z26i}Oye}SVpsK2@YDx-%`dWuN;my@lZ4lNL5qg0hcuQoMkD0SGUd_Za$kNd(NZ%fz z;%94U>uW|-GS&AmQxCNaMj)JkiGe^O5eDHwIQ0N?YezLzL!bnByE)O%Hr!a>)Caiq za0r4TgJ4cTF(%y7180Rh`@UcUWkst9BPVl|zJsx^zn!O&im#rNpR--KO`sZ@;AkFd zYGV#HwMLVI_54&U09*U{;fztbIHD!V0I08PZxDd?$D1Jyux1`s9!7R>2SZ@;8RC>} z;d)9ce%3~+YUU(=65KKXZ5fU<3RMj_`!=Ttl~CY^5bTRq#XBR_iKh0(0jhct)_5~K z&dxv`YCt3fnVNZ8=wqb7x(gvR!b5N>Jzx zD27Dzu?zCIH1Y9NG(b4`_}Cix`;k<=h=zJzVR$=56Eh$4aCH+*sJR~8B-|YEX2hBM zA=FLO&2Jdvybz8?0gfI4BwegipslT0BM7RBY6z^Qr$>k%2IZs_rle+d z!_vkY8A5PULcyU#B?~W@BHA8fP6+TYw8dj#umBr-LJ(eE%}~{rh((hCk1z~1vH*@i z06-|)DxthREL2Qx=zBRsEu8iIot2SBy2@TKEY=u!CJ>MD3%FrU405tlH}yf2fFhzk zA;4ToA7<;};AL(T=m|v_S@;-Ps014-N7#FZlZ*%?k8qPQHaARQ zaJXN%5*+6g;p7Xm@mDeoHCA@;RWb7j_VYEez#3Z^!-AE8#`VJkjg^elp-M=!qaEJb z)6v=s8jgco!tkbqkWe2_wSWLyAYcHagzz?l;k=24Do8cBE&ePtC;_%G{NI@Jp8x|~ z|4(#RGVs%CUZJ8Ap)%3ewI{l56f-mo?MuxwvlKF*EJ4cg#PEZC(3Uf)D@O+8HzQZ7cG33tGK) zu#`3E`0v!|?Z6+eO}^E6or@hG(LR;^yV>+2U&=;wTjTdb2v~yl?t%94_DzB4UmtB& zk)`YDY?TQ3`tOfPI;$Yfe}Cq1e>WO>qigk&FTkSy1jD!6&sDg$#o(r*59n@`W2aho zr*E=ef!R|~icXEViuLdHX3rg82rWlPr;rzJy9DJdJ&3f7_`NRO@H6bb0BJsFbE(4Y zj_Lb*DhE%uJ{k!Z1pR#5|Jaf6!M*3 zdYg16u`D!gwc<~UaRbtI2a}QX&pU;V6le@70jhwhbVO+FNoxLMvdJ|>cg?*arG@8} z)wwT+ub5p4U7a_!%%vkVqIt+Vp+ zk&$WOkIp0=UXWBRqUW(?G5?!^fdZMqu__lb{O9XH7r&|2mi30I7}$-^Pjy10etk5; zeX>yN|74{tSMNU_X82L-U_y+DW4*HcP$%@O9?+u;XFW;-+B@3rWUT<~D%~o0yfeWr zBOBJ2$6mL;HogRwaTeAHqqqdW`jg5Pi~auU*~?$<(fePf1z$_D#Ak#1a(E3onB#A z;!exKWM1^|Z@px>KMmvelGbzFcaQf*nj#8N=+sZ=7uH_14U;6fYZHoE_Qzc01^l|f zeabX37_z2o#7f0zt%uPeFb1Bx^Zmn0U!K~CLGoLo|AsZrBwd^b#vo6RRqHd!oAlVK zQJ%W@^m23*5`E^zMuTKQCAl6PtI+;e2r zU?OXhLe< zF6LoXAM=vGc09?3Z&$P9Qr0XU`fYZeeet-KCxRA30CFiFn%CW^P_7-nPw-2 z3)T>Ih=Z>_eN?pc6w{q1Kct17)EqZJ#JfGBKG$hQVefx<2coTh@fiU-w=_@%|fOU+%YlaX7YGg_1H-g@iu zYZAAG`qzG0l*C_SYyB1LBl2(^q9<|()Fp;gnTk~AhJ zzLqABJ22gcx&aU5r#Oi6nHy5Jku&vaj_R}2S+xbHBGPDB;4(~&*8C~T(yaq1w8oX^nX5_l2RGY%p=py-mSf*i|BL*?18k^%LF5PjD z$pIqN;NtuHuEmM0RlGEX2oa&$jyY3f4TY}DIn0;DkdAJo3Z{dTIpNdn(F(IIYWk#J zz|TvhQ|ihD#gI;l=3^#$BByM93K)Z7uKu@Ev8OFR4)td;p6`ulPc7($EOIWnh1|H3 zx-x$cu5*~!)_YzxXyN;V`&zWq^UW6lI74t;YeqdYWh z3a324VWkOCn~K6eE=?nx#Re_SF}R+{70_~lYT`L{1xX}Yt@yz7;{JvAg-S-NUn=kH zrK$A}#8$kQdC@F7?iN`!c;C4tqFx|!y*B7?!@(87?{QQi$lKhzWgwFbA+7Hx3@^CT zARly;vZrX(&+nP{KHq3ytfD)%E-k zTfuT)9crs?i5|MExOtbJS)gP7*n-LEs^)H=SmcDSweNYEcgg&pk6_ z>EpZ}<>rMjGGE!7si3Zb$7EoFae`=-Sv`;c?8sXUL@HdbW1VP;Y%t4|WL~l2wESIt z67(PV!b2^FR262X)_bTU5=VYt`siA5C*6mlMWI*OW}{UuYT;Ag?X0vUfT3bKdamVj zR;OJE#9Ld(fCkR#7^!#d`_9c|zI#ZHlI;+Dy9Qm3{9OdRotGx&Yzhzs> z5eJIMwB-&S!0jD2k*QPw13PVB3+b_Z{Nw$+nzKui&fU81;`;{a+$x3rbFW~S0wb!8 zP@ukX231!Q6Z)G?{tAM~n&8z9mP!HDtkBHG%XoEM23KnT`y*EIUKm~PYQ3t6aj-TV zZS4f7NYPC6J3h0OXb9E#RAK+jcJszXORomNe#*4!zvC{_bTrczGJS#f4rJQ)odhUi z3L@9nEr7Ry;;2lIfMgc7H0Al$zWSQeo2b`7AS?{}{#}W7G?VJS{IKSpdAPebs>1vI zJs7%!b|$uJ{sLgi#?JSXFksflftpW#|Ja--$Va><%)AO8ZrFWr^kC&GDl$puEqL1W zNv5F2K$$^0!hd#mP~i@;%9(*j!KGgiKt#CY`z>C8fvDN02bknlV%BSWX2 zuitW^{55bBVZ}Dw_bHd&!-tS|$eVd6PMU-wH|YT7?9X{*DXm^+T85N5uX0LBlFn^W z>$e0yr&^4GDScSd{4MfnbEI-P+{mkx( z=`DU0L{&qLy?591nv7S`PaxT>AaEJ4YQWuQ0QDVyIEuX|9$y@Tf(5m*#H$otq8+%S zpnzW-%JIn3QvJ+*^m}7X-dRYK82ogf`}RNvitBdH>n&y4UFvvlIp;U&w_Y@DzK!xd z(YE-#{>?6pQ(BZ&`#89D{~f<7ZHM9YnGW)3sUE#kYZU3L!{8a5FcJ*SkJ|s@m~b#) 
zdIOlYrYs7P-_Np==r({<2LWIrvo==6^v5tlNQ{~I`=A$MJ(hT8(cnKb$%6nE{&bH%^t;~^lP8wN6S2qyvFlxz)ser;&oY5N6-_6f2DW*} zuw8BRFE8ZQjW5spCmQiTcbJgO@d1W$_?fH6+j^wKg*%^)`1-ctOGRR5SOJ)O74=mK z_Kj?n3Cj0=%O&a1f_as^Gnjdy?K!-Z)u;IYif}f}6p#*(Y{0dQ!z< znBu(hy}pv-TyOf=yl|HNJCJw2B~eM1z5Jx&+UQWbJ?$)&NzjS;%8ZdvJj3=$XGmBI z^lSNJC)@2*4#@%OCq?Y0#Vb$C@k_%&l3%{odQ>3Dj4Qq!tID*a2~_2aK{(U27tTXi$3eWPJ&bC)}(;T)?bbc$-`+Lg_z`Rk)Uf|}kBC#Ng21C%W= z!WqE+VPfs2_r(k|vH`@%NYCi~qoPWH^0g@(D{WJj$P}XNH!9vl-xc95!uIFST$byf zs%wq7&E=cX2S3%O+VH^qfqC|UB~JyK2RF`Nb8!F|Q`D9V=M`Xt86RDbhSJti#RDHT zxc=>>>%Y&HFIrbEBNwIUGVS{ocPH>8{o6C0UI2Z{6i@|VR+Y=wmwYm7k&9r!rCB{c zKedrJX^B1kcSbtAs&_b@BWB1v6V%jX>-yRej6}6hX(a{IsauiQ%_xTqX4%KH8IqLr2jBZ)du?xGZD-a9N ziD#}&uV40L0R$=hNLfPwU^F62GqcwX0c^tyItH-AE3Q(@R=}I?##mRQ#9c4iypmtHU6T#k(F8(WS|PVR!ST?RqMhw{NX~ zkn`R3?7Ga{Uvq^yJ_WMf&EpO_t#W$VBfSo=BaOOWz%&>H;M*dIQ|M{2M)*LrYf}-X z^R52J5I-Eipo`flsM<2S%xgUZ@@+sAHC#9Q^hq#yZXg?&zx_apUvNJ?x)XJJa*#<# z5ERdMCI4N2nRUkE;BN$ReUJ-kxwDn_&t`XrjpvgyoTQ^b0JWTAlUV%tz23iA08#cj z;G1adzr!czXfYw%@93M(1weL+ULs$(wNOS}5e?fvWgM#sQ<=&K4QQovW#UbCY^9Tc)XOE|2;+{vPOW|cZ0Oayf zT2R(N-roT7tRkfjQTik2TqEto9={MhC3OE+EfFUo&BUnjM#Puc#;@!v!sANFDGKS~ zBVj+^NRg!}%2Ne$4+$U=#oXlfow$vaM7}3hCGz6O9??_5oh)G%#>z>OuiU^#vpv+B-5Cmp5xgqM z(JZP5ip+Pjt{(M~MHv?GPo?SasRn$7o(OK&170zg?Q%0oXA<0=>(R9LQ1|BiJ^$(u z9>J2(F(*>x=gDDS8ga2aS*w5ibA3UVWxm~_>J6I+9d|t=?2Q2ReEHXdQD=)`%A>3K z*Ql9%z=&&dk^7xzn6Arq#x5F3Iv`LmRK%zE^9%PWX`IG1jMjSZS=w200-{Mp$XpMM zxIG=UN>;h=NUL6V^Jkp^S?A4&=__!%%h1m>0mi5it%JTVj_$?3FYwozYIKdoLBJPi ztgarGPXHb-f80A0g{%{%Zq8ai7DeC5t}a+d#Jwz14~qL4EMazgsCt;EZgRK?{;8P0 z=aNA6q^S>pD=Z0BL3#lEdGqyd4VznLLfDY5G1x0q#nCmiKpJ7K>3&1@tKrq zC?b9Ga^xz${}ZzATs#01-WHEuYwI5FByfr$S9HDrkrJU##iQCQSo32fET0StNPLd* znE^}!A)!nRr%Dj`H8kddDAsnSv5ob(`gIzpt6i-cN{GNg>gKmXzfN=%{Z ze;@Fzb#GWhkMiYt8b3{u_bH%PG~u%-`WLBR|HUtI8c_uYq!z5}#XYi8SIE%ptpPNt z8n*f)=6En0s?Ia@mG8$6!D^q1X|eW0jrHH2_=JLG(RM-7#7u9fF|v+>T9kVI@jrlO zBedJcQyjvE8P-A}x^bSN!%sW%&_Fdn#A1E(X-62)yb?}posP-tvQ66^Ocj90<#fz7 zw*NZt#}!<6y)K$t42drD3+1DMh_#a+M<4w@pkh6I4@^M3)5p=s~>mJq^oD(}|(Rghqm zyMB}PsuZAP8BOxnHi(hq!iQWMf;Lny2P;5)w!Ypq@_4!}vP%81NUZn$`D@P*QXhJx z_&qpZ@wkyZ9)WlU^KP&`Zsx0HyuuHF-)w5yHzI}UhVb!}bOQ#akVC0(Z@@*}cjvOS z-$wROI0a|}h8}6(RyF)6{OF<9P#22-e``W$YvSOk_a`pPovDL_jqhIqdmC)G!8M9m z|L%V0HHghf_znH(;n#I}+4w6yNhb;1UN&1}Jghp}p&NE4ACf9z%I6D%dU`ZJr97E@ zaLZ=(uEG8-LRJM^<H4QbE)MFC6K;`y^?vJ-Is&?pydq#GhSW8B_dtmj zmJ&B!2E#g>k00rM95A%Ub{uE%gO`r>GLw+m67Z!12@Ka=Ws5-9ZEA0lImYE0o(B(6 zU))>HdU?y{SFe;W>nD}lhLnWNIDjm8@>0t__kYnc7P#`dIF9eUcFDb$iX)QKcD`dh zQgk(<|9s!}jHWnr(NDhp53~5MruywBfUwqqB>3zhNdG*QsM5^$)Na2|yw@>K&(_!w=s8FeARC zfxc5uL~X9SyB^3B1wi6R8qc9s37mLi1CVJ+uYvp=k4*Tb5m-r%rHh!KQJnQ8AO-%6 zy6%=Q>jc{fJnz@eR(YlrsRn+Zm(R}#bvY=4;}bOor6#<(*|x7;>+9g|DMl*!3=?>q zhwrJF@v8>#mQT?g6}rb9b{D@crU0nRu)1;ao@Z9~N%Y`txgc90Ll^*pPQhBlu|f<0 z6>*!5O9L+0772@c$_!O*oHA2pCzG~KVS&`?04iK~Ud3Ulu4}2e11Rd|F`U@HB{4_O zK0kMm2~ty^N?Wt+kdnf2>_?pEP0vst+Y?X0nIdvS@`NT=lUBG0C+qq zS%th@E&`&whOG;brTDH8o+<+h>BI-U9vA#kg)ZS@;>vi5W+jl|sKrNZ(>vGdZxsB+Kks_(GxA{5(x^=U(k9zi zd?hs3C8$wi1@#l?b!Otp(b|hre)WF-;X$IVe4{q2tqOLs`PFiuF?^mOm+{bxvN&8U z11vH$i)~HZXQ*YWgl=s|0We(e`~`vK=PdYQrpgj>i_jkg(6RvY&!+J3(^$Z`!^E4u zGv&Htb~|^ICj@z8j{yWV?){_0BzN zhb$0#yaV4zG@#gS5M#iV(4O>qYNwl2bdc<4(`6@}dQE_=c_v=s+?LKK0RF216S{4q zvrwDja5?(5&!;RZf9hdc&}U#dLIwbvBYyc)DUWqqJC#>&#yFT`$O-X&wK_BR#u1VL zu$SR+kE(6n>A46OlA)+@ig910>mWPQ`O`qA$9ySh%UhPeHM*$pXd*A788khw>6?{+ zSZYuhYTRR1M(4pZed!E0G~MM+=gjKqO64e)AnS1FvuB~uDi|k4d1C$cSn#3^K$<;) zNpJa05QqT4I)l^_Z0T^Vy2&!dtN}YU*=QaTJx+15`~EsmCid@-sd+##2yffL%AqQx zY5`@ig4F5#+yj-+hs&y>Ce5YhqwW^TbLj@LrS<_|RCKIq<6 
z{go0x>EG0-)pT06835ZK^=U3>Ff0-E;)f&`XCt>-mR8$oW~Wxapfj_CwcUhB^K5F^ zLvN6K+0F+`Nq=`Oq;3jVVC%o5wP6n$Hr<*t?kcI_48n@CN|gbkqz3a2^NyM))8J|I z-Z1{Qwva@}hrZuGux%R%JFuYUxmB(7gN-YO*<39BmuqP>f_Ec%kGmM>Zb6xEOGZe& zFi7Q^(()SMT;lT(@@K)2WtX-_|9!ht=1xRIR{J;tZMGl(+M1eLIj?d7#L9N1WxZ|?*D6t1q_zLq z&hoXQ{b%cAU^%5YhW_GwZbS~YmAvEWHZ!v~yi?O7c6tA)()4QOi!l6@`_;pFj+L>h z`xiM7eVoAp%D&lR*BmO51we|@u#(Avk9BzNJ_y+4ltB)Cfd_t9Zm@joo3@D3^+hE@ zWTav(K;9yS&50{0;unw59zLHpbaZOa0h(Dnu;gGi*K^CYnlsk!&W*Y$0LE{oh*E3RGEsPahX!&iUcTG3SR4mQWa;iKv(hEj+J?Ly1{w3=!0fGH|QmhZVG}Py@>RLg^LW`HhnS)#vTuKEsrmrW$E*0Ok`Gy? znKSP@RfZ0%4mh_{qA(0aE6v_ru=ZU2=X~^Z32Wt))rVbsqZw%BRh%r znd!hG%h@dX3bgS8+_6&7KJoN=k=3qGVV%EGE097|nfUph%WsLbcg3QjWTukqx>mcM zTbKSylCO#CC))r~+Cf1C{~+NGvwvQ9;WZss5JXW5Gn_J3N=0?S=WH(kVnMe@%=oV< z#aC^t^kL$s&te$?IZ@TAOW^C=9297?5SI*+piibte7ha6y$LLUZ=~dEb3Q%M8@4v& zEVf&wTC}EMm*Ct`<9fBC$6bsa`s(x}k1xMM?WMHodVUnt&27z-b^xZ%W04`EvNC~| z%`2p9Stl9FiqoTD7EsSLrD{9y-H13Fkl&MR;Z%rz+uf{50U7O?UxPeJ%pSLT+1L%= zeeiss_q`QJl!QB$V|R>k9rSlF+O>83b<;Y;3?4eI?qi?m>kj}INL{?J-@{A?6G7pXGn+V>^zlO0 zROWAqU5pEf>}0OG504UCkcM?r9&9qZ5Ri}G$Nm)Shg59h``OD%5hq5#C}&^il%c8C zC~2qWk>;&^W7~}6K23^)7D_crAZ7DGF#QZN4@~C|lIML*Dwms?rsOLwtXXTs{^!f~ znC>###)s23@l-SMaA0xd=alsXn9v+Xq==LyIyeOs)IbkHUdn{n=Q86m<(w zYGxtSM;oX)aOkAN?z1gmeYXqQ!gvFE$UwWL`yBL6$NQt$v&BJDLE9aa4bLRkBHJWX zGh@0w<|M+0^Kl+(r+C@0k8&H>{?H?rCgq`-?1m{YZv`>ve%Ox735q{B{OXbQ{+Y(d zEp!%-2YEPN01*JibxMKxL1nA8`k^;+HP(2_5lnkRRy65QaefSW$xY3ETR=!Stm2cU zHD+OCuiUNq$FUZumQmD)_%4q`Q9hwQR=jEwsWZ%ov*{df%IyB@_9dR*=EKtGJ^I$* zdk?!Vf1D`YeAAW2kQDszvxq84V@xA2)8t|nN|$gw*F0D*G1VF+we8u~D~f!h-C_iv zUNlgAqzsarwrDTt#Z;5;%0$aDcbW365c3As;MaB0JLj|~3Y-&w9SdUII2JngB+*zi zHRbks2y{ghxw_L8XyuOKV!7q_l>^h={N?IiW>RuXVsV@Q?v*sfkG1C#wM_2XW5^BE zwuTHap=?#t=HHMoUA=g)IBjvrm$EskT3fm-nTh)fOWng(ly>UMJ4J@rcV@{x7Y2FH z(YHLDvV2+y6*YYj+`^-+T{~Lt5vDqNKD}m>@$>JyK1B3Wp2~{~=0WMe8uXi;Rxze! z7Gs$W58CNkV6C+oHqAnJX%Dz#GZFJ`FD6f$^R9U-@y=)F6bQ~JUj6!nFFUDus$Yfy z-|Tx)OaNgX#NcntOJ~K4=i$p-X!Htjdg7b6gc4Uw`KnV&E^^1Y8;7@3B~a<=JKcHp z;ldt-3$rj4opFb2HuJbg6iFNjMm_z=qdyFS{7o&0xcmkB&gFfC#p*5Y3~kLZ=Tkv%T-~1>CcUqTv-L4^Cz(xSB_3oNofW?w;I4AIfT_?zm4RG-hYm$K zpX%^)w`)i{V(h17BDPsWGWbVj=*4{0N~65&(OZ|~3DsuF#2A(4E(|%B@rQ@OCKZz! z6a82;rR>mRNCLNsVUsyZmK-p=RaWknGYBx*n`+!FLp=0A|MkP*sMyYCYalQ$o% zl@kwvoi2{uASXkc!_)$H&yFzSZIH{tYho%_v|ehi{g8l&cr|23q-#Lt+%vFD3Z$$3 z{@St{blXOHd_BA|jDl+*uZwowZ+PU{L z=I@65)a8sapU98yo`de6^)jkw{z612tz|~^9^Uo2PCMN9T02%*t!v=Ewt+@}Nyey`I6fyay**V^nl3$}E66zy;8?uk~ zeJJ3;Rw**aR(>MVWhy6+wPb(@_1wz_F7S_?>=%2H0>*&qS z39>wNGr(G!%VN5>t~1*_0y#$;6O^qxgV z;hhhaMm=kEiIh5M&5TG+)J!Q_;A{M2h>L4*pCAOH{c$Thx2A{5iYp?q#UDD}i~Fa1C4fNz1--ZX%0t&mz*iyN*g?RTAP-N`d@$B6jDt4dGD%d!-2OaruY) z{~k6zro5cj;gFZ07{9SgR|^ncB$mU%R-(8MJ=Yl?ttIgXX6;+q^W zt%<GTd?^-3SMUdR zT5`30tbmzQ+|iff8ZY~Mo}tz__u(x`GL+Zl>X*DHO!qJQX)QbCxz)RI`@ViUTQ*j1 zBOA5587!leT7V)o)QBB}u8!0_RGQ)AkbPCd3Ub)(`h$7@k(;ckb8T^&3TY3%1rB`$%oO4~>OCS#W|B!CH>Q1tST z=u6Xxmb$mN>@XpZ<6O~q8T)LIq`1D4C z);tf)Sc4*|e+jkz8UN^_%3bXtEVtdF4DZWJic>dbGly82Ej{DHKcH)K$2mw8UTp%~ zsbb9M=&3U%~5KfOro5J;E$LZ;6R zDb3bP3(|XR;;3_H?On@p^6!bHsbV-DQ?^nTVDU#j8W&-WeW-wa4(8H}T`u5sXY zdV$LtP3#MFw+Ejnx;4w83?|ov`a;Df;_1vLN}wV`MGWOavJ@X#gsk60h3ub*;v8*4 zibRTo#lM6_89$oR7}nYw%|)npvI3tAGGzjNg+;3*z5M?QNW?iTzkhLEe!FEd5&dAC zRKAs)*RJK6P2Dn@6fA~j*}B%VL1{GIsTMI&7K&gDuKQgOC86PPuN9F-SkFBgfKL=B*r^T&?gx3Mn-X5c$+ew|x= z)j^`uYAKpb68Nz!KiFY{$lirScRc0K1tiy{z=WeJ4I@oeQlv`t6yv;LoF5ef}Cdru~ONw z9vQwtBKtZqAN#>VKdbJT5374qOOmfJL3wkq#`)|r^E}a&lkVSpI*tD295nYfXXa`>mn%pwGJ8vIm zbC`D_??i||RV!OF)F+4ODU1_@*XM)3q`z zQHNof4WNVOzDP;pr@gY? 
z552r<8D`DJRl4&T*vHzbDeP{%AX?h2ElbsgQ5DO=>N|=FcG;DEMeXUr{(EdAJIqD8 zS>xNuCUyk}#HqBap}mp8I=+vo`~4$9{AlpCXX}5iBiLd( z(SH7+FVEfkviu$)b3A*3LvuF!XhJ?s)$kp-Z;hxsxzj!qx|iIOu;2awM$0Hq<77hN zz3`rO=5$T^L*f;aVR|AFpk?YZ;se{pP7ABYJ(nfdc^ApkAN8cKXbo%- zcj)FJh=fM=ln)hLz=oGU3N%h;ls~k)6mZbQnz%Cu>qz?Fu}ew7I>NjlHKK!5VjbcpswEP0av3QmGa4yfowvbSeTzmTqyeTypiABcgSiQti4X18BwlxM0kxqhzR+sdmzK7Has0cCRuYszL5xGAc/GMJ0ZGgoG5nzkWGMgSl/c2BbAjYYl4DPCSohvQaW5DdWoKbQlCCctAIFY1SQuA26LIqwK1oY5Jxt2mEeo+27xtDHe8DShXQfvSVIBArVwaR2fMfED9StHUOtL4RVsFpJEkDENg3IXIzMGWdMlFdhNsM0r11Vl3Lc12e8u8Q4jsRrBkyuL66A7wU2AejBDZ9sfonOLJWb2FYLxkiuX5mMi4D5LIJ0UaNTztII4XxWTVp1zBVjsQR1Ca6wEFtFJkwFk1AgQqq8MmG+vVPjC+M+N87typxnTed8q6wy1zzBZ0ugoISl3FVR49V2/ZMubqyJmK3AzXx8aWZnVStB7mPxQpy1I0o2OGYhlvnIcRxTKMi6nQdUrebv4mo25IUipJ+cl5JcQ5qqO+2zRalUQs7KJiACL2NYLHsjtdiuOUziUh4eyXLuph6hdMYo48VEpud5hutKPBGcPeGGB4FHYINd+XuqvcZc4KwB7Res8gIlBrUbGJqyN7W2HAUFTVVp/17i3v4Hb+h/N+Xrov3144lBfz8pGK+UwnhIKRifTgqW1ZaCCQaWgvOBpHCu6077bDA08BdJFNY15kSWA/PD68T6H3RiHVMnR+j69zwAegsGegoGqMh3BSZX2awc+JWyynGWFE18IQMMO85qp7zy8/8vu3lkWuVUpWOPD1k50S58e9+JWIQ7m5SCICV+JE1X8pF3/DTngchn1wvlCAlC9Dmm23I9ALem3uFW3+fW6uHWOBa34w8uBqB1CuYMLAa974nzEGoYGdOTk4Ntdw54bTKsHHTzSOzenRy3utNRrjnwVqf3HfwlKUkMo7dz+9Dgtpzps3Nrdd9RB+fWPpJu709Ot11uTWfoPbnvGeUQ3N6eHLfAeLfzVpr1d9vC1/j4bS7+AA== \ No newline at end of file diff --git a/v0.20.3/img/simple.png b/v0.20.3/img/simple.png new file mode 100644 index 0000000000000000000000000000000000000000..6d4f52ac4703bcf476c620cc5734bdfef9fcdd61 GIT binary patch literal 19737 zcmZ_01ys~u)HW(GN=Xf=5<^LMHw-mH3^{ancMc&b2ug>PARU6Fbc3jX(n=|yG^i+u zNZo_}-|u_xUF)tT!_4qIeRe(j**?%lsS**~Cb)L(8W9qqqX&1MdcS9jP=R zymsySxu3F;pL>vlvzz@jRw2cI?^p%-ojiU0ScQ~W1qGGe>}~ygJUzf$@V&dIowI|p zz1_dR3GxdH@CXR-@Qds7i?IqR2#SLjVSZi_5iyg0&s#g%d;HgtrYFYP&CQxsP(_57 zA9Tg8X>IH5;pge=#3}>_pOGGZ_HN)e_!_)w>46s`@FT!)DIjXe&jH>lczL@SbNzfMF9 zTx@K;d;^uW`K<$lblg>~g@YYD0|RXI?DaA3`rg6z`oUh#3UFITJ!c(1XJI1&ad@Cx zkS4!tkhc>?msJq%Yv+zp7eiS4^4luv`WyRVG(>__6~JkzsA{V5o2WW^s)~4ETn$w< z9reITsn`m_Z4_LLjopNmv=#L{odR^M4Mmjf3_aCtblembZT%FGD#ikOI{pfRq8do= zKy?96V-d9=KS6h8dvGLhL9rk|C4{}WlAe>1jzfT+0TQm@V-RSqE9juPkVZ(1rmCTxrkjeL6UtYU-$y{ez)eWQRzT2R++SQr*u_`Z z2ksi|uOTX`AQp%gRIqn}E15|6I(T|$X{Z_62&x!s1i3o<8;Q7UItwd_Xdu=2(bgz^ zj2{vyY!qauZ{Q_>fg8B^>gWn6T8joMtN5v?xdy7Z3Me@0czcU$x~qb@2N-C(fC~rL z1V1QoF>tOy9{Ton;@+Zx!2%cz+=SoX(G#iXCZuAZD=c9Ej!nfx)8EEBSj}BsNC9IM zByMeJ;$fiY2N!eK4$|=o7C`cQ3TmkuJGljTAZ>Mn^wl+;MLl%v&_4Dy0mAN{f+9$? 
zvzHne&elyt%~nUm+gl5K5X9(OyPSU*B>?tg6pzP&uC*tJqr>+fGwbiy)RfP+yAdHbHJJ6YtzjuJGiLJ1DAY4q> z-dEfa=?3>f+o-B}2cyIc1rZ8p6@3E(FR&H?F=wFwVT286fkGMpobGuUDw$`-_hGpz{FZn5!?{R04=x|LeXE8UrfwKNr7J+Zl|V#G8PbV z)iYE8eQ7I+itCFagmj$|2AUpM_tHgF0u!iU@93*)53ZoMrjrm{&%{X3IuKkQb$u$OI*a;OAHe%4tf#~@E3A(HS{+?dl_nJD+#!Y@au_)+e`dyh13yq z7tvEhDjHmUR2DE2_fio8jg<5~9BrLE1Ofxx6cyYQC6wIV1%;3j3L5tALjEd-;1Rfk zp{PH?Pz!D?BpfL25BG)(DEb>~YA9GshzWaOlvO>ARqeF&U4>D8c7Yqg^#U#q2PkV+kckKQD}zuMo<|&Q3xXXhkrV zwwIc}7{7#;pMZeBzM+Gswv9)Csy$jq2qkKd&~Q+&*KoiXc=-s}NdzbYRiNS`Y@;Uz zS}G`9>4>X;;GU}dkAC^9Y{2LLG`)~=H3Qb>wQI0zNF@dRAdCHiz)&>$*RMjB(qnlz zoP_Zya%*aIA~F8=9SRQO%J|~Sk|@~2>&iM56og6a1eHbN1WEcF%-wnXG%&`e=_#ZL zI0o?}d@dl3nqU4+_i@JQvwJYH&AslqKnC&MuiXiX-rn8E+V!OB;by5oA zqn7k3HXMt-yAUT3{$$*d5Et+3e|;S_yR`ac4ac!{$Lru`WTGKmTd-I}o~I%a&&mvo zJT`{4{@!J{lB(+GSEbe zbTP59Po1aBo#q=^7g}5^41ZQq;~N!3MAsI?EIlNDrS?F@z=fibZ=`>gnY8Xnq?bQQ zL}DjHzIoWnmp!Zs7Y{G5XE$(hRVk>cpSEpr5+~07-=BSiK8dVd*_iIzSrNy=Pl>p|JNvchY1HaLC+fZsv%lC{f=s{bwA|5xSrv1c z!SmmpQy$M1$y9_TMT+CCvBwX>k_KZ5sl7JFZqdqx(+UU(#3v*SOqQrl)|!eP?#vS6 z+_eNU!|M6-qX_2KP=St=CkiLC=BZy8lRXr_}=o~BT6oVm7Uod8eu2WKZo1V zUza-v7Z*F4Fg zm=z@(qtLoo>D{yV;q+DDp-@>nS2< zYYaaZ1b);R!69cu4gEg&%p>9#J7V1C#VGq{i~0BIsqK$1uU)94;n)sLRyu5q-`;SP z4)Lmn&jss0pC;~!9QPaKl##emwfU=mxJeY7nLeT>P-b~cMHvwR@z7eCc&Zq5vYw5; zwnoU;RrKQYh{-bSi1H(5mxuEB`%cYsCP6`6zG@v3XeAUYQ39XCn=2)^o3r|w-Be|Cd@%{%f4SFgwA zNCombj6HsuPvh;x&SK?C~8N0a7rnBW11&>&DL661S9Dm9~m5H z)Bnb)CkL%p$Hpb0A1Xzr>zfqQ3LpZaKCT~bPw#9gO!3H_eOGi)+zRWj{KSK={~C#q zYP$6x?()G(efW10XM6FPZ1R#6ie4;6p1Xa|QGA2jTCvE!F^KHxo)D#Hy;VOpC9RV% zS$AR38a`raV30)4P57va51$=7GXR>F93DINC9IP2BZ-H_OHHA%agBkKwxjRW!E8~t zlQ(P?uvv<5up7;L1NKe0O`2a#3_tv}zZe%sk3n1)RvW{Pg*;R#(nSrNY@M)=sVnTq zD^OMW7KsQ~HF@y{5apZU=+vz1nbg>3|74H~(4cH_p3{))uM21=mE<>>x-~sLjo!ym zpOKhYV5w??DP*N#HQ2p|*3Pb;v1l`5QwBia(gzVH5yw_idh~Ry{Pf<{);A;aU+B?h zjPGjOfh@dmuX(es*3y~xK!13#(Vmc)R{Yv)boOOvo|tDHZDQ~FzGrm;{qP6KeWE+% za|elhJ^U{i?VaR2mc%n(oP17}%Y7?)+s*=;(g!-E;N`=qf-th2h9ade2niBtx7a^e z#je|b+t?_#ID9XOuM0J#z5ljAD%*vTxZgZ*xr|^lgH_k}vh!PyVlusqTaiRX{f)?w zukE<~{r%PQX~y;gG59<4^>jBU&@mR4rxGywEDm5fFdw!z#Nm{J8Rz%BL=7zi`hJX9mJ^M8>{tWfz>ziNue7$G7nCEVq zBQIZ8QxgpvLB7N*yrIkU#$A1xnL>A6!sPR0bi)&!kl7bzs3-`>hW${n$Mn;H!R@is zOs^yNF)J0Wy0*}~KNqLp4#veknTd@8DLHuGdqK&pniNw~QX-j0FY_n4NcW^*W-Blt zz(8*F-V42QjmR$*iP9%u(toc+lRT>wiF+Z8Wi01hd^oKoXN+-LP7DH9lYJoR+Q`Zt(RS$LLh+@0uoA75W|;wLV{y2{I-dza_O3Dy0(B1|yn zq@SDb45ic@@N~KPRDzQ_{kEeVp9J6EPF;_Exhu@*rOu{6z*_J@fbwZAy|7=dt^I2? 
z%$En1ta$r^dDpW!2@gb%UyD^$=~WavzI);{QH0fV_CsCzXhtsx45I$c4Va};mggBA zkCKvjeTULY2v6*FbW$bDi?;G2Xa$hm3AB7IuJ2Kw-k2yzHJrYc`ldf(?a`9vI7gX#m)gP1XASv`Q^?q zqh_d@#bL7a4?JFru1Z?`lMVlpR~|~WCN28%kN1=ilB~*?5ElNUeq07&yQeJblbJBh z?YkUO92C$A=Eyur{Ha7nxpO=hG5Tk!-R{-X?Y44*!>zYMWQ-nkWHi(oeoMY@s{6*k z;x{x=+agEEF)2ZvG?gH%F#L~mu;;q}ZL*Y}8*5`cmOW<7M2AAP&21hF%^an2*JB}v zDn%M>jK^;{@)soNh!$nt;w^5@-RJD4Un2=knt0o^vSa>fVcPr52m0#WyOC$cP5iza z8vb$l&g$3l#cxrZLsOWU=ao}h1mS!)T+=Do+JUX*42fH2uGP%@Y?vXRgNm9_{QmuL z)?aKS;kL=_Zdr(3fPv&Ln?7Y}?AL60)v81Wa&o;L7@}OMH~gGi7$>fQ3acs(cEo*g zaolU_N-+3{YhFSZC+3L!W~*Ph;cqPz+~JJ^3xn~)`?U&12Hsw>RG; z5X7GxbmF!Z*H@Syqt|H}WvMM5ies#J`O;oZD@nmbjdmyS%r+#?#uVEp{CukhljXo{hLAr=(UrOC4z%v^5w zt>BbqW@61TtTV%(&n=@#j2L1QB&_O*<82WmmJ*x@)z{#3~@+o!frH>)sWHP&9v5vPzTah<7*H17;xl$P#N`~2MjS`UfTWmDK;qFB#|k5BX(*_i1id#?^riFdqrT!!4K z^+66RJ6{L~|M+71c4u|9zmM-ut-5k~Y6l)vU3bd1gz0!;v1N_o^v|D0kEm{*F4R7c zKNR{x_ECyT*y`7D&yiNsntZO!)SyuE+X45!1!Vs?MzeUG{IEwqXy{_X_4%nJduuRz2zEhPA%S6maE5#++#&L46DPV;2PxsAZT5dVU;c+_pob@YLF8>Ms z7}Mdk3U43TjGbfiTIs{e9ET03v#&G5)?a7yn3gY8>LKLS zvELxuXfBxvn%076v?DW9m~q+O@z3DhF^KF8;=upFfc07lsj0cHqj){bB8e6o2L~;R z7Yk`-DawzT4zW;>_wo(8F5KMCTa`WKy`xc_+nSqB&h)L#zy58@`w&c9iFcc*eVP#8 z@Mm4!n>+qi-TrxU!ub>05X|w{N1IPVh@)>1O(}WK8%qX$HH+Q!n5{;~V&e6KqD+@y z$`z$IUcG#Dhev{Ygej7MTpKkodBjXMX$_?2 zP;jubO~szw*kgf`0bljI_w8;wF+#s^Na44)x0`6ZlRy?_L?{L<9ITZpGB}yYJHI_u z=U->|nW?J~i6!2683Orzdgh!Xdv++Lfw^!$3U0bf(b2JZpM^ zqAi3RD#<6{Am3gVD<=x;sRuxyn>MfdzN%Fe7c(8w{}8`u+K>2iQk$b? z{YGd(tVm6!liF}KGFS7moI?tQ02{ERGB*J~2l6pg5#C2{)e_g_N z1IEFD|E=e=t{mg#b9#5h%i(g*=F`s6in$Ej7h1?;@!%lc!VqH?vRA*-^iAMketzdC zFP-U{oU5#Iiwi1uH-uG$YP+h!xY+n-X3^i#9*-o^5uWHhXL^*Wl1y)2{qf@bbn=C} z>G*8si%In?ZtDjQSMfrcZIKF?qh z=VFqCAofEG;7`q&dior!DO1u0{n|4w%l%MPQF|`r__p$K%yfm;E)m^P12Zic94 z;@+R$no|s(FHcT0^Z3vDsq0RU`g#f){+u4KuB;U0t)~jVYU?JPX!`_R-cnLV^kqRl z-n$IbD3C-;5d84evuK_!*W~i28gfo4QmDP^Xbl8>)gFVu(<|n^vQ0ZZmG(tzk2=&y z?e`a2j2)J;NQ5{4kYmL?ZedQy3kWBkm>e~1&X6A-@BPY!{{7|KrOJ87pB*k8GqV_5 zsmDXDM}u6!uAO_wJ0}2&<6lt|Y~GLdsh>1%3(>3;(@}+cH|jX514hK_9dd zshe3`QbKm?Ruvnix<6T*IxXM)iI<t-4@oa zotbwq$sTn^l+h+VqtjVZQkB9#Ik~Ki>}@5 zI_7xxsaZXYS2?5faTN>v;{5jgq zbcx0#d0CCFnKACQ7@nFZIyg9Zb5VHj!Wf<21+h$78QnIWFAz|;CjwbWlNOAo4Z(7jQyEDWC-$D%gMi!?RT$X)8 z4a{aJu~bW7UR;0nF&E197h^&Z~CNOXxn-Qov!1Ceyem^Fv4AV2A(i zmBXA#!0CIZKXuB*eb3oqyi@+JIYL=!*Sd_7>fUb*@qMBb^&D3(>D8M9rstOliE___ zNTN>zXWhC#T-Oyk%>Mv`G=Ifz#~is(CTibG{hFHA{xs8R_WF-2ctwhU;og}*GOXDo zlx}=Cr68b7e0u^H&s{`?yoEk!OPTvJJUTbz@I1QSZ}o_$*i+x-qw1%Xd1v3*5#7d< zFRS-lv-Z81TsWeb+4`5oG{^(;Ga!d3D-f7gUq&d+yE6XRH!42iVooC>BPG?&G3D_| z&Ul#BL!CC@i5Ko-$ht4(rmdL!F+z=wG zlzw5kYmVMg#m*&5SqiVZ%}#mi$RO8Ph<*LB(G-I{wc-?23lv$BQLG1D;b5k z62dp=M66khAhTZbr>-u`}poFS*Tm?tCFDI@HYN@2)XiCL~lU0G}&4ME_OyDhaf zgo+s*aC%demJd){hFT9Bi#J=~v$L(*oF%i5smYH|Ji*NB1!By_8?K|CSXYvj7uIET zL@UK2uJwTkM_>76yYHp=-O{Be5r;i^H*e}UU|~of?4}STdG?%?)6YC57QLDsz%j*- z7d!D>h2GxRyAx9VJKC}+V0Uw6gWSbUu~G)#D5ozH2Up)@d9vglZ;Quz6_Hfv=a-Mr zbkP8p+#0wK&{Myy39RQme>hbh(mW+BsEl#PV`oz+SNrfs7=HUGwzIWIl4s$D;;_T( zb|3l&BVtdjkB>P6n4jOpl46}`cGmN+3)?IRT;PcOk;fDJgr$k`?%nb;vH2CXsws9= zlHSXU&T?Z0QT|LlGsU7AB;tW-WO4qqv=95pXGT?`q_0P0G_qZ^hCiWDSY)IYOTq7|#pKVC8!UsB|_hgWcb+*$S6Fl3G|3V3(ob;RZ~ouenL_#9EIB_+4+4 zb#5{XZwV9fK3Vc_xa*kYPLJKt(6BXCMp*jm+WH)XrN;Vwoq31bG50N!quuu-0Ih*> zvM0EFM!o&$o&sGBK}SA*6qf33a^BBrte%6=aTmiAKnF z&=8r*8MUNp1_cgys*}zfdVVLEQ8h`!C!1zTC;xr2Svczf_w#Fh%~RVJ{9`K9Jf=Sy z*YM2Y&L_rQcK3ltD-$1R9&MMOrl-TG)ZC)tD&(+TT7?SwGex$h+QVMGRkP)W+hhHZ zh5ly1a=x=%qDq3GkXt9gPCIJw-!(8FVKVD9n96*=yi5n|At!mXjRm&7B!2s?3NM%A zo5F5C`SZ=Y$6doDc(`7T$`ACv)9Ib&ZVP_Tw`tz>5P~S;#@5l%;ha`T;F_&9egBp5 
z^?XmW{l|7+Hj@`_&NTF4e-0y?u^rpp2f2;QD0rrkDXFC%@bEumGak<_YoDtbTSVr) zr86a>{#5=fQ5(ylD+A)8EWdLX5p(uqeRQ8bL!Y05O(EHz%%epd_)yUm6>I>0wLW%r za=0c2Zq8P^R7Jm7J=t0GzF69&k1oZO-r8i*&pQs`#7=kcKjh0-pbGc{_KY+h^Y?G~ z?%KmS)S3bO*pg@^oGP!5e1Eo)=lQT)@~HM&q$z+`rdNlOs_@*8U`t(5T>Q@@c*g-) z`X8Pob9=0zv>C1nDXmaE{k7lV`AN3febGQQcU~nEAmm)9vC$S7Vf~>W%bkHWdpAzf z!A`dNL5KF5h4Q?%hqRnXC9soFyc?|E(e}!*C*r|Q64>y#f#)84oeg>S(;|5A5eG$A zNPtwpUPN|wHc!QzA1{YHxA!eAnWGHrM0F&0><6zuht8VDK0dq9w%T;qG#kS?LGZ1H z%0<6Q|CY(gEytv1Q8ikSsNDMcjHImC>c=U4z~R#m9RN07!^wR9?sC}iGAHrse(NXE zXX6qQ64jG>1_oKy?nP^@Y}&kR3il-#&SkYTVoe_X`4p}nZRsuRA+z%3feBBcXdA13 zA>^Tg)#28pm$!F62#Ov)^`dOfAWk-X4xEQF^&0*JSr307AGn1DJs7W?B(^ek7(ZhX znUYf$4i{m=g~S~-v@v0B@pFVT*HK{ukhh3Bvl18o!*#5xLQ+~WSL(yX=!OU z^+;9&NS@^7-I{Ll(hN@CPL;UyNc%$-lC{B8KZUkkdhYWp|t3T!Reh;Nju% zuO~(A!OVI>=Y@%*q*3(r^lg6I&;kg~*<$sx1b{TFCD9Z7)YXx)DdLE_#fmsE+cObw zpOdf6(n4pkeq+Xh)?bShuze|?i-$}oXl*@6Y)7W7RqB-J+rx!u1FoUW(^A%sq{7Q26=IyQ8y&0|@zrDUS6 zzb_<8$#tSTqRpe5GxQJZn$uxkQ*Yf=)>iK^wdu$y zQkkW}k;oA-*q0=H4d3RtiUQHw2@!ruz~7Lj;x@V_F)Egecv?EbaE~@^mV$3&RBAqGfWW>l&d@)z}@w%3s{gtxU&jaoZ zySI*c200OZK)W1#N!Kd2=*Bz-2qJhAaUStHovr2%mM+dfd~DjA(=CL7+U3PLqpA{x zFSN2TT{}t{JT+O?hcWkGmeK8yD9`^`m%wuUK8e_7^%N(R{}$Hv(RYBJt^lR@-v9Vj z@Xw7ZiyLb!G_}-<_VhW%O%4h+ymn%dHYjW=U+BdLw0LZy_vjTOwd214#5>2MvJ5EQ zqrLF9p!-ECLpXaIm3f&@nOL-Cu_q@dNgWv(8L{*#P{SZEIT~0(RF?)DZT*N*FBtFr zvr}__XTyk6+I8+(q__$+TV8+-D^U`kHC`~Cgh|AjHG!b6PmTo|F2XEQlAYy;mz`d` zy0sU*V}0DmRsYDL!^;Mk#cJ}Da=6c7N@{B419CPO%tt zGREho^cnMtq35+O?ETjCve`&qZ0JNpgqn0TPKNO>Ye9E=FmPnFKxN?YoLcDxTu$Tl)d6h``oAc~Ri z=S_k}a-EbA{MeIQ?A)&mlCTmIS5{W=a8(__QuDtyi zU6Bxw5JkuvLWan8XKScIRO6hWyO0@c+L+!F{L{f-iB-xNBpP(@83eGY)0)k{hW423 z*-auV_on+!ubM_c)bJ6LT#k;8nNVjfC^dD3jZM$#SE&{ue|L?Cte9kk#@Jk~sk*IA zDNi6$2x^$ zx?DCdvfSaXH!q@lG-U#3>o>8J;Kc_!^x;+{L-=p3ce1D40s9NFDWtv*S;zw7stXQp z9^DGXKZ#DPQ`BETBG2GJ_?_Hk7kbBU0y*L0FTr=!8d$io9HE24%QIE`$B(P}rdlZF zEQ40BXJXe-r+y~pc*NsPqI)1DIOTKw<~GZgeP$Kz-)1opLu~ny{seO8hv^)&*S7~5 z(Xy5y`ywrR#fBisTem~UR6EBrD;0x^vM|iUq+SEpE)qY!1SWE~=Vap!x??`xW{9_R zDokH3M!n;?A7iE@BbrPSAN_PB1o?M2J=El1^icCz+?0^Yw$WFC;`e0)MPDbRs$7vT zd62<<@9ba%4ci8}S~KUPZ5I98be=3%`*;=NBZ)%S9Pt z>_05>$&pkOVB_#(bpihMhMzT7CGttHh4R!b)&jjK zEFPUrc~_}2(`tzEACPW;oyJOO5Ql=trXJk58V?rvhBQU~B~ZfszzNinI~NANxj~Ng zvw(Lp$4ey^<(8obv9lW!E-P0MXY)9SGaKNX6Ua(ZUJDIuKnr2fMq`g=@#rb||KKsS zuqaVYHV6}Hrtl_Z>C=7tW1N%xu7-BEI_Dbu1d@MuTE6q|J>7)7aGQ?;90Pd%4SY9R zNxw=-x##AcFNTG)_4_<~&kZQ){)}2)=x((648hiu#@nL3O8VC_kSAL|=w&95^&d)J zMgF_BoQg0pF~hLVbKojBgr8$(F&!btUqR%N$m!25XJ}AbIi13&xU#ku^}>D81~4qD zjddR&Qq};o1FeZ0Lbl4NS@DK^48hiK@q}Ojd9|V&jP<^os99y5KNN_Va0P|PY9@BR z7xD3Uvu`>TqHmxvHc_Ka+7suI^52^mN5bx#E3cmFe7@a{P#v!>0#dyb<>Va^f{cU(qohU-iK3-UAo7M z(1`9%Jj6Inl|D$gWUmQfDS(9BgFDIn70JgD5cWEg7PTX*t-0DerSx!+asLi1%kq$c z;h69?Jx64*{j63|y0$=8SMJ}r^%X%3o?AQuN^uorxNo%dVO(0BCgEjDY6S_dv8bq{ z$G&!duQ(PR+17kh#m*jk_f7WqgQJX6j4Wztg+V2y4I#T=u7?5B@?nxaH=mOwDT+53G(E zb)Gz1|6EPq$-lelrUnxgH9*s3U>T@P}WX2TJ0H8vPq)Oj901 zju4He&??SImIvTML+FXm>~!P^(Yo!+nt#vSM9BXULRjLO(uyFy;catsFqIX$g4D&` zhD2&UI44~K`-!8Td`t>1)nYq?YG z3E)A`k(rC>$2KD&-@4;~^9v42Gu{`V4_E3hj~w_e_w(}d7LX|~QFB!D$ds>XOaOz` z;i(>l&cDrmm%US4A0D(whWMX*ycyK~r%B%z5KXwt3Jck&aj&Uf!&gqtxn zC1sVtIAG6-wpdv7K&5|!wDqFfM;_A`MIg9x=>h(C_DlKpOZhQ zYsu=!so!=4+U{A^ufmrXt8onVe%rc|_@ZTRUte9bzPET8;`oh6p1VExKcE+YIWjx? 
zA&co2B_Qh<=vmy!Xmfl=@3;3sC#(l(Y9$pFCKeW@^UxAgRcj%tqQ<7z3nL_!W@+J%nx_Wgr{ ziW2-#BZ<)8UZG(3cLJwu;f;h!b>~Z2q6j+Nmn(_!SsW~fpdVc=a`#IC!n{t;AR2jl% zHnfsT6%x{i`;_xaHe;?N!%7a)><6_=>?ta`mXwqvdY#v|qpht{0`R^b0#yB0Hm^DS zXlHiT5ztyj01<5FQ>({v0=*2TvNsWDvfbBFqfnO4v+LmgAB{K2(aGpqX!4y8#vMEy#S?vo-k z`hh)<0di(o#WbE?x}AXt5AS(%M#@oDZPfS~rb-O-)&x8(=>dKMi25Z3Io{$?s;9DuDibPW{73F76yy2SnE*{z-7XEjF-JCbLL( zV~?uG?3v`xDe$;qgw3>K(N!i^0mRZL}7o&f<`_)mZ`BC=2ez{H>nA*Kdq zT>&|sd(#tT>eOzZdb-Eh6q3wKWqcpJdI!|X>|T}iu=4nXF9;g}puq5Vv_p@!0DE$v z;iMp}KJMc&cM2n1U725fW4UAVDuppuhJA0j=oSX`*PHsKSTux8K|K@;kZTsb&eI|P z(J6gMIbSBkAdK7eH_%jE({2yUNex(inr!i1g}826&Z+4Nyo-*nQ=Y()Khv za>Tr?+Av)q_r&@@m5B*9TSUgOyy-ep0;@g(gi7k6hqrCO@cOrOrX#qbIP1zjSetB`qI9#r_!vyyAm@Vp8`rqv+0tgC?Q~>u~jmK84!m=k{$hpls zD1bNH#)0#d<-`7Gg~6-HrmqJN+(xP^ldPhy6$iMYwx4>!oEBfyhDBbxB8b~x0t^YA zd_NDr0MZ#E?8JXHRltBbnL7i(QZ)r)+k^c8Pgi;i#6J3;O1rzd_HHC(RDNA*7kU!@ zXM2$y^eky-Rxgb^?&BRgf~@d909s-5bgxYYjY?0G!RQo!<<+@jSba z=G@)O)4fcUy8eb~TOhg3&d#naMF{Zi>MEdd6347Ic-(=ZrUlpl zAfu-F%&W0IHG3RH_HR!=;@R2R!2|@zkVs@(&;gUKo?a#ntkBWHfr*QY5U^Q5T2_xZ zvWr&C?L)-b*_p21ka0>O@az&C#&Z>tSjRXULzsvsTe@#~IC2%lERwB#`xg-kGsF0P zDaH}69)G$w0ulwJ)YNe?F_3)eU?_lME73JnQPI(40000h9sKmka-RV4FcKi%zy!?J zMK(1x`M|#9`FVRE_@m%p%LQELxVsLg)c=D%KNMtT88G51z$TFsdy|_(|Abf!nqnSV zYuZ|EHE`ttv7g=5%w0*6`l|iu5U?>PDs`BJohIZZu2ZH0V2cx1K7eY-yKT3wGsD)) zo$M(`TD4!n<@*!CgdjOTr;9b*hc70!0t>+rh>lL>3 zG!B^DKA-CxS!L`6knyZ!@~dkZ0jP!CI42HVXcDjij53tK9Vw-SxW*5$BNhtBf`v9$ z7J|!|*MQD%06b{vUmtAev|m@-s<{mV$e?aYnj!d0s@gkcwx?5DLCk-rDzA+Hs&z40 z?t%tT#WNE~Xk;+Lz_l=#w0869rnsH9Kmcs?AP}XSle-ES0gWfpw<@#>4YmsaeYXnA z1!DYxk-UE=`bJSciR@$0XCeSaAt?`1J=zWeWs$tkX6fN=XIVa(Ryx)MisJ{`lM~{#%F9WW3*#jTxe`@Jy-=Kp#{Chv{ z{hMc<%TTpn(GXm<>he-ewb6BAI?1K4FE}s50pj=@`1a^)4#1hASj57)p&z`IM+BlC z7bl5-lFk-+@^iDsbMXojsCwv-_2GljI%%y@1GciVGN)pRR-x?H$L>z$Toq(NG*D?* z2IY#0NJ~d2030{;sl1EI{~K9if{`7qv7DWLx>Nzr?!WS>d@sL7;u`*rafsYem$M%0~I&|+Pl+1PpdbZeV&$`7X z@ZuH3BtSnO0eQL4n37JP-1SiJ)u?oI)E*1Pm-1qMIh@$*^X$Bw^4{KLw6wIVD_b|& z97)96fA`JTO6JQ%t?r}jgO0#=BoG1|Jm5&O4kg6E%~!_%;*c-z zpa$w=RG6iBi0kMhy^d~1)pW|4EuF9I?fi@r(jM28xB<22OM-h(qA4uK(Qg$MEN=Yp zol42}y%lWy`qd~ZGR{(5+NC+h(25Hns3!W3O*aE}W;l*&jT4wbBK94(!+bZLA9U}6 z40a95FB*CFOsTZ+Qxew$DQ&Y*N{b7km2FcJ`X2w)tuMOvpA-In3R?z-g-YLi&a;oW zJng;ILk&Eys0QhW9UY7fq)XdrNGS(wc{AcdzmXU4J1ebJ=(^V@8gHU)b#fe`oWjF_mc~ z|IeK%Q7AuIG00)~Jx}C0Uljg`^##I41tk zw(Ys6+|ewSmI;3l1~~t_?fnQlE*3fqJV$CVh{#rXBl#h38eM9r4;KN!57MR3uBs+R zz@0$%7lD*J6FSbFn^+$8A!=mQ-@!l#H7 zDNr8b!AF=}m%Lg^P$W!|mJk6_G)3uFZ~e>AELKj~uT^{D4nJgP2)^NpJ+~TMNXyHK z0W}w^lzwbcxerC3(TVAWf?}iU$y8y_;>YR0$MLP>s%63&go=a2;D6aO zexf-J+k5#Lmy?XS9V5RBfc35bi#&&3v`f7Gs$ZThlH{ro%LDie{CARwl|c!LrI@6# z8ri%KJ7)mA;02$N3u1gEHLd>&Q`{(v7QwZ)xem5D$lLigKFxy&aXb`?7!QBfmZgQO z)OGE^QrqZ@)ooRU+f=|0RzJE^^uK1PNS4BUgTf)l2UJPKf+`8|;GDIs{*e*f&3A>Z z2z5I}nficI?gEIEiCeB3HiiYJknmsTlzz4;0R=LrF*RDYgSbK+?d?^4%GBA_$FJ%M zoQO^AvHhUc4^wb}6QiFczmTKZ=C{W7KNSx?DONAj#9eXH04i-m-pUsGV%nQuKgmY4+IUzDxnydx#)ZF1tq368r%vf~LSL5QbdYz6%)_HNOdpos?MD8_dhnRS|?-FpScY^xIuZ9z;72gQ* znRm>*=o8P=#0PF8hX+apn+0}6DlL6Zrrk+reU=>Impt}XO-%nh2GnTW0Cf{Dzr0Rw zPr7>-7>A)m^=X8>C*&&WcCuw7>~>!CQGeR3Fy>Phta9532<)6$_78U00p8DzaYeus z8TFNTF2If@iGRafyjl}mg$2+N)V~uF#Cci?F;{z{3#QX;Qvw743uCN#M>>n<43tuV z;yw}JtnmoYj!X!Erji!4)heK@?0?bkg>vk5Ot0woR)BtAJvmkU*#yHsuOJ@!n_u?w z%vYUc{J>8o0u}X^DrP4o!WbXH9hI%6`TM$R?(7ifV&-373dQosT@Y0ffX!EaZqZ%s zCWJd728R1%!Qi5bT@L$SO#1TrkzAGk!=!K65YD7O1sOfV20Y+aS02)IO^O4xLgQFl z$$?Lk6)~gC{zl?a?o$~e*XUY`UN0L$+U4Ed7anj>1}Fe{oJ$~*e*qr|7G{xWQ&9Y! 
zp9|TGl^>X^Ae*Y>Tk1YK**DVKP4Qiqh9-N$2fu#XM)H)6g31Q;K`9V0oxlzq#$8gN z4Cv};Ga-nTdk`NXsMpulc0gT90)PYTVufkO=>0bOF{*v1&5?V+r580PRLs2?V7JTy?+Eq2mh3K9PSJ z@&5>4?QgX-bD~LPPYlMT9EM=rCx>$)c*G+^CCVeG>POPvzuAN<4M#UHjur_cWu-JT zyWbPWH+#GXGBPM`(o53}-X|Iag&xB1;|M-vfh)iGW5bQb$`O*_?YGRX;k?7^)ake=U(1e zU=#8yoyaY@i;%=WN4VcK`qPs?f^s%P!KTT|b(u0L;?q`M5kRznx*6vTBIAsOm8~*-DR(ixNzL|I<%`9!BPWP_@gTh?{@_ebn zhmx8%GA+_*TIu^mYO*h+kJd@P&vgy&Y`)f#i*L0IQPT^v|1r}P;!R0kIe+LWI&BX^ zFi!CX%ieInFCY#L3v(A0W1C{T5+E~b7iAzou(HShc%NS5`zK$1-djEB?X5bbk#fQ7 zG5X&4r_)YI*j3K#yc@L-qn*E$>5`Taet)dy!ab#@wOFduB#z4v`W?$bS4?Zf= zk8D^TSG}7%bCtdEYyVk$U9r4}0~AUguI!ZET3h@M#J5)nJGXHoWa?M&+Ry`OlX?PY zTM{+4{tvsV`$+lskR;D15r0xi`sQa%sWBFLYc#>C;^neZRLl&yb$)jwLa+5)ocWt? zai~#zv0s_F*Q;F88)`#Vz!G8$6yS$?Yn-dZ#@SiuO$J$7;QlLtZ+Qf9aRnH_K5dxl znJ8g*FnYNSM&tcQO~0|aE_U%Cr-|C)*wvGdWa#48XMb-36UB`GVWy|4RYA$gqe1wo z5ODL?*X5eqg8qPZ2nbsJ5m-1A;1a7n8qM2F1VVCZ;%TCeEz0U;w~Qyee_0UKgTI17exd8=kWd~exdoOuHCIh#WL?VV*-OO7_0nD1oFrgMmKYt;* zuyjfe{C<$?!!|TD3~eR)b&-Gu3E95ZNg@IaTh|!rtp4imJgV`kAnA zAc5V+fhF^mCB6|!txE>IP~E@J#zoFpU6~(?|`VqlA2{?gYO>@l| z5I2;3vH-q{H?;b8y4BC&jhJ$vm-{nD)k+ZZ3=mMWAdf?5FP5P#FaW}=raOlRO4@`a zWIzEoQb9L!fMdMuR#n-Og=)L-X0Y>7hT@XOH?|+vN!GBy3 z@r0B2_BgWMeptClF0T3ckE@A4q5B3h#%)Ac-2h(%tmr)!r^(Jp5@Gx$&=R{4A9#y3 zWE1MtukH|OpxR?{t&n3^Jlalbomm0D?^clVrK?d~7@&->SVp7*(a zpWpZQeBS42my`OBQ4em`>`HjH?XH`u;Mq+3Zyk_|nDu12b65t5v0_cq%YxMHebj1$ z)qUwmT;b1J2N=k=KeQwln?{(RK;w12$u|hFPg>;UqFFmk;V71p71XU-^j+xJjBhXl ztU9O1{Qar$YF%1%S33exCkgySjr$YBl}&Pef>}~H0d*XA)}idM6&`NT`i0eecLxvE zcNBkcn!=I8J><6;%bPS`o(AafSH-5~w`#e!d1lb2GFcB0 zB;n4u#BvwyVpqS~i|i?fZ!PpQHtmzH7BhjtpiAHM#*2KA!gvthUjRFCi#D6V&n}WC zefWfs*Yv$w79}I&nGF`m?DF^&a5Y<&A0X+B9;PibGTuClfIiQslb9&iLJ1dPW zbfiPt=)56%kkrD+Ev!EUynMGky9i`adrrFoO2;62sXo-`m(7#{mEdkatbDF!C>K$&<2_9yq+k{ z(H2jQoy4<%nWZla8SunyNqz&q=L7HxCK{m1t=`5|Cs>JA+9jsmve5|n(KPvJZ^WK} zPW9tb9ybwi6VzF}sffI(GWxK|qCqx+t^#mvd=}*tD*SJ3@|W`c!Zk3r)M6DvBxJ6| zvtGIICdOFeO3RYU(!JX0Blr<2hFm%?S5m{dd>xYC@N!%^MuMR+)UCC)z#1o^(cejnWNNugw0abZe{JaiSMxTh*2}{ziSN6PDR?$Fm zz(36t^}BJ3`HRC@RatlrD-T#8p@>RW1*;+F4|DPl81VFe!hmidP${?rj{-0O>wB3% zT}@kRGsqG6#Kj)Qu2b#$m>Z!uN5_MS?e;Hd%=gysDldw6zOgf=9M~u0nE<*ylstY7 z%oBGUWEcL;>{*`c%(4yiG5=|GOf7Pa#J@MVB~q&I9L}*!auR+6Ow4?}81Ua71j8`a z(_&%{RbazNbULPSoiIG@p{QXQvG+)@^^AI8=}n^hr;tEjONp&)>f*L(*pL_H^%A&d zp~DF*gX_z{rL;4M-V=hyYAJKop0&~NrssPQ1`8WB`ie)dl5=Ms?aqWOi4@Fl zlGy#f>A60X+l|mJNaj{%MCXx)W^r6_C(J3Xjqe0XIS^Nf{(zKSF(!Mam8(lCtAYy- zl(t>%q%q#JGOj3c2VmkkC78#(dfL0&iWi0qGmaKUVQJw&W7e2|o~D3jroH%!M2gAA VA&jdz7hHL^#h&PFTVdlL|2uzjq$L0V literal 0 HcmV?d00001 diff --git a/v0.20.3/img/simple.svg b/v0.20.3/img/simple.svg new file mode 100644 index 000000000..2038847df --- /dev/null +++ b/v0.20.3/img/simple.svg @@ -0,0 +1,3 @@ + + +
[simple.svg text content (markup lost in extraction): node labels *2, +, X, Z, Y, W, each appearing twice, plus the fallback message "Viewer does not support full SVG 1.1"]
      \ No newline at end of file diff --git a/v0.20.3/img/target_transformer.drawio b/v0.20.3/img/target_transformer.drawio new file mode 100644 index 000000000..db89e444a --- /dev/null +++ b/v0.20.3/img/target_transformer.drawio @@ -0,0 +1 @@ +7V1dc6M2FP01nmkf3OHDBvO4681md6bdZibtJO3LjgyyYYMRFXJs76+vBMJGRhiMMcEOeTGIT0nnnHt1r1AG+nS5uccgdP9ADvQHmuJsBvqngaapqmLQH1ayTUombI8VLLDn8JP2BY/eT8gLFV668hwYCScShHzihWKhjYIA2kQoAxijtXjaHPniU0OwgLmCRxv4+dInzyFuWi/D2h/4Ar2FS9L6mcmBJUhP5jWJXOCgdaZIvxvoU4wQSbaWmyn0WeOl7ZJc97ng6O7FMAxIlQvunNXLN3367+aD/9XF36fa0/xuaPDeeAX+itd4oBk+veHHOaL31YwF207LZqweZMsbx/hvhdIzh1HcdR/oCdo43OwPHt4iua0yB7Z4ky/Qf4XEs0H20roPWwLb9QKKG9YmwnNt5CMs3HCg6Ur818SDI4IwTB9J+0JsxN3LzHBBs1z49XwIcACdBDdgCQnEUeHblhXPSk/VhDfVCNywcpcsfVqgxs2F0Quc8koHKICsJp7vHxQB31sEdNemWIe0/OMrxAwr/gd+YOk5DnvMx7XrEfgYJuhaU02iZRitAgcyGii8oT6DpeczRZqiFfboHTXlG1zzg9NdH9CWNwxF2dUlSzbOP/YicJMp4uS7h4g2L97SU9KjChcCroTWhO+v97pijniZm5GU3YWAa9lid+893ekGZ/wJ7FcLyR8yLPogigSMhWoJCJcAL7wgQaDCAJi0nZKAk2BIbDc5GiC8BP7uBJ+ydZhWOnu8SEaiEATSV5gB+2UR9/iQk4ndDi9mv2jjcUzBaUwPYfvXUr0qJ9xERritC0g5k5LKHEhtTiLoiWEht2hvU5NYxIAM5UAUJnZy7m0YJzgHU+OmVWHII3+uxAbVoIWpCbygxvW3cY4ZlpknRlrWOC/0G+SFFK78KU1x5qkE07eJYH2iiwDW8sI+kej65FKyPrpB+HZL1n/W0/T3gX9d4ti0in8r37YOHdTxXYSJixYoAP7dvvTAR9yf8ztCIW/3H5CQLR+hghVBYq/AjUeeM9v/sFtRQ5bsfdrwO8c723QnoNV9zu5krmK7+8vivfS6szr8iDMb0Qv5sEzestwuE6oHkJQ7lqzZjyIIQx8Q71UcazeOh/EN6mE1mRK9z/fsZpqqOPrSVDMvUoZEpIwLgXLSi9SFREqvKFLjTolUis8eEI0DwqgIiFGnAGG8F6vV8CB0+y6d8LFo3t7cB1eLY4vnDb1CDB3PLg6qXWfMOSt0uRj0aGpZxwTwBJSY5aEKWQhauxhMikNt58GE/oIl64hgFrEfL6CtRIlNzQAGQTSnCnbUBe5BVRlUlnIAKuWtQVUcADsPVD10GobODhdHzFa70DHLoFM1Q1stwV0zKKnI0LmKvGBREpg8L55wKKrlnlbz74A95p4XPfgG2dZcBno0mohCbeXZtivL0k2/FN20UvPffboVMeKEyRTnMWKGNj0f6vDBNDvHh1LPpft8aNP8KIXA7/nWPb6p6ljg22j01nwbFaeGXLWAHs0ysByEunQm30MSBvFQUG5yslW5ToBeCJD6wfBDlYxcx6YEkEYDgPz7nlgr5/5H+DCcT5+N4OuT4Q61XA+1kAQobMlsZF36uo3nefmlD8iLubabYXCQvDts/yS0z6866ILda9TvlVucJ9lGKP65D8W3Ox1Mit4bGGPddkgjIiBwepeyCZey1SGclG3Fads+9tzp2POb58L0YjejUF/i5h5GSXtL/IVjVj4+ts74Eqao9yePdvf9rJZb7RpG+lKjAEMEwtjKf34gRUITowA5ErTe4azlcL7PDxBUSwTwRG/P45Tjt5/ceKZ6lU9yrDrLUW98jH4eNPpvq/ppbfWlbTzJ2+Z2pe0Wp2X2H1ddDQGsN7ftxTNkzo7EpGtMyGd6zEqBcJvjkYksptHqeGR09V/UaS05dPYKv+4Qd4Z3p1X17sxOeXdp6PskoAht1inUmN0fBlT9tqVjQBkVu1EdT7z/hYEXlGcl+rR79bT7SGLgLpZ2l39rVZoju+x6VTnfpwr05/NJQ8tCLdmybdnFsgpAfSIRm1hDa5/lcLchxMNT1q2SOYy3kCC51PTgg1kWlmSsLV2gyroYLY3OuBOpz3CSO6EOriuoaIyqehPdCioaxdNmZ1IdKkyblUQ0GtNhXTrdYYY235fJEponaNsBQyIXhGwzxMiGLBhVNvLdR3/+XBEWVBrUGxFnVDFulblm2zkJpUccY2aMjWY0y7DE/L8sd6errboSZq9ZrWpW1TVpOjYCMoqnHp8mNvVyFHIBksfgcppUHomr5m+9L6kyD8J6E1OyzuEu+CeI1fh8EEonK9WI5V5+xslJ0+FuJMZrpl/+tBHjlUKhT9k3aKmkLWzlDdUxUrZvp4699U3lO/u5722lK001r2utTn6vtaxW747XFrl03Z8rU7kjyxVdVQQh9p36GMJZMQSZL9ZUDIHu7v/jR/J52v7/puh3/wM= \ No newline at end of file diff --git a/v0.20.3/img/target_transformer.png b/v0.20.3/img/target_transformer.png new file mode 100644 index 0000000000000000000000000000000000000000..4fe98afc6e0266d49cef14dba0cfa7910dedbe16 GIT binary patch literal 125524 zcmeFYc{r5q`#;`7k+QWZvSdk?EQ6s$GmP0RgBfEN#xjf2%zRvTG#2F*^>^i(_ z%a$#B&?tSgEn7I^w`|!yz_kOMIk;n_Wy_Y`5MP9)uNTXeOmW#F4bl5^B(19KPGk5= zL-eIpRS^^yXJ0yv3Wngk7mY-ACA*ORyr!zGs-mc@rKk+Wt13%FV9IKnUm7RW)ikaD zyx-Byh5CCUBU%8NLUEK*H^qhM;)EspZq5RMo`7h8)3R6;-pEi|RS%3QWKS3H59#9UN!$Dh ziSFosf=mOqtEip;3j?S0-03tgaFckjziRO;3j&$<> zc-AHfTznnf{@gwT?PBO^s^xB~YG$PGV&Y+f$D97SZKeyIK?XGYgMxoOdGp7g>Il~V z$J97LUqzKaCxKe1keoG)R1I*-DpW5|SBMEpAI>yU)6i7$B5630tjSKk7!!!0hZ~!1 zWo2Y$V2NQs!N%cCxDf_v=1w*;az>cA;E`y&GL=e2`Kr)WU`TMCYOe0*?CeIuX~FcF zrYJ8z3o=m 
zH;J#W>JGluV02Xjb1g#+FJ~_}hTx7<_aI^{P4wJIj@D=`J0+MITF3tRAr)z z2?c76an^!T-84*~21YOznqtcIqZ(Tp8bcr&Q1A#xAl(L9mIe?FS4Y5X1D2m7PT$1^ z280ey_9dy&;GST_bU~p^91S5VR{jR6%1}dVAAc8XBZduUp2CyqcR%~wN~?FgDa-4YL*CwGD=mMPQwxX4V|=1u~?**m6t!&*jHW8 zhyg+AxoHsSnkt&+`e-$@KOU}y0S{NkFj&SA!vGW<&QJ+Z)3WqOJHzxyaD9@So+%YU za6vgy+%0kDV4(&U?)t74dd6&jXK!bQtA&pni=k?uZ0hH%=}ALj(RvJwHYP{eLWb4tW34W!aP;s zXeWJVbtG9&6OM!W!8d8iCRw`^O{jjF<_JeJ0){uYP}$ z7gN>4!}ZtX!Fv1iG>hMvH|oqmzC0l-<-l)eW&Igu5@EY)B&$!9dlWNl-Vo zVxeJ1>I4`8L$zi@y_Bsm0d6P)cz}tQ1rdctL%`6F1#_~Xp^4slPHZ<8lL&FPQnRLb zds59PbfO8cUk1vX3TJL$fQ4#e2`VggA5Vlb)CFUOQYDl9^@*AcG`Q~R;-R5#fMw(8 z-kUo>SsSVV^K3xeWG5ctW~oI|^Cc^jshV^M)XD@!!m})`%rRt^6%B@il9)_9%uI{z zsu|#`hbDrbKv3#>riMU*;qGqk7H}&ca1Q1{aB(rxVuB&Vz}L)F-HG8!qWha@Izv?< zrc|1-g;s!{x+mQWyQxZ=Ofu8eT!Rrn*Cgv3IVqbm)ftEY7lyCDA3}qM^2C@zDLw(# z9&9w)l%;73RUtS#2Dq4Psx`t`!waF}Wro!X07tG2Yi}PP6CB;#Lf?oR zmWJkJEpr1igbx#IVCaFTQ3zy)rV9eCtncsRh9(;zsW2o`(?El1p{WN4WPr0ZNScNQ zG>9+M8Lr}DXr;!a>N~0E>BAv7LpKc{OQNfhiMg+`pP7fUBhuTN<&XD=W6Z789T8Zf zsk@^e%iW0V33O8vi2%!@p(z?_?rsJ^BMde4e9h1l;{a0_(iCq_)uTEQ;4bQL3o{CV zLPf){enekW3xYFDpMkT`S5ZdNoJ{oHNvHq=N2szs%8+JYq++T9C}W`Jb=L(e2!pH@SV4LBoMno*q*hAS>gQ*WU)@P#JRSa?7uD+U%#yCwZ+7bpB zWOC?0OAsvf{y_&XUmx*#nLi7*sHnazN+5q?xtFNm^{Ih<)=Nz&?F2RR-c&{k)&qtxLiqc7U|k_AyqTK{=I!m|N~Ad912BM( zaBEd>7iFTis--GQg+$UfMKX+`uD&e1v1b4cittAI8nKZijR1rX+!@34@U-$|c>AJ! zT##nUS{7h3f}^>zItgcm1^NnCGbO26L%dZiRiF%(iHjx4TLZ`Nq`};MRmfC=m$9-d zoPyW#^|A0Ug?n1~sUS#r=%((uTOoiMcLpICoNflya>7H+k%n%D-qxxVh!)+#n&#mL zF0nK$@KjfCf4cJK7L75cn!b95BrmoTiE6-b^M<*aqs_cj-1QwdF9Yg0yR(dp@hU#5 z<|bBbh^L;Ox(W{O=BDCFqZhQDI0C+j`R;; z8Ih5OaCZx1rXk4;SYE(*xFsAxw1CqINR|QBLf^oT;zaRM4e-Qx6P%r`R2_{xh*rvM zsQ#wD8)`C08ZLCY6V2S(N6VT{^d_p{Exb)Utn`d%K2(&qrk63(f^C38!#x?UZqDA` z1QgASt`5RCIEYX@+4`Co4TOpbJHQXINi;ndw2G^{CIUgDVzr29bqhcoIEz8SY?>+q zKa7zz-5bz-bCteulm~@?b;oOYLL9MPo`%E#iav?JBG9dw-f9|JS_B9MI7|aSb*QQ; z1*@#ALRa;WkIqqHFekWhG-y-FxEz<=GKN< z)&zYt$`86}+&zHVH^8W?GB>S)x1akat^AEPgG!RjLeaEd(LdxI~UCKVJTdlw@~|!Z=3Hr zw>Ng^Tyg25{rZA^sxn%pI+;@1EHmpLjnt@OObofnK$Dk)q+9Zd#rE7$9Hg<&*oYLlx-rf8!)VvTXIQw7rE56m7LsmAq z{>96GB_%kyFP_EsUzoss^XM5^^S%=rGXG_Ez~XcNg$bztU$9tTaNG8syzwzH{5b}~ ziq>k)H&yPEn&S|vZfy!yWv+fxkD59^f1mDG_5F;i9try1=N2bxdbIV+UKzs}#MJ{< z<>!7)S7tjr4bp*hM;|%Wk_>SxczLncqdRX5;yilhKf>@V5wLJ*?{6mXqq(wBvG5{? zr!$@I^28zqy@=hh@bFz#zBR>1d_O+tZcaa4J@_D6MC+&)WBg&wP&W49A&BcWLiJR4 zeru^qtMSZ2}NiLvCK)ic>Uj|LBizpDc>`#B^*Q`_ImFsFX7PBdlC} zKte}uH#+}T9Pw@3VMttO1b2Jt`r0q#huqT)IfsMs9&d9cT9cG2Q&vBB+~wSsRA`Eu ze|5Uv{%>{D#sVF3`&sJ$dungtt^QmXp;usS*=E@os&EqY%<>BuOqFt%F zhHjkFQnR&wUlIvB`8(rejHWuOZhgZ&rjK3ukTSWa_oQ*ErsxsLQ^)?6^^iIK%*=hF z0osYbS9~<)LTqLj=IqsjN=(fw#kDJ#9ekpc|6Bs~HS5hcxyp|dE%`r8g;rzwW zQ22{*XWS9q$fml<`LWi#JcpqgwluNG7VPjanu>666K~ z%eMbT&yXgg3!!GCwLv1M|FFYnwg`A5aF5R|7YX`N3iU4Ws!|6um*&$?pUd(dskbn? 
zvR_^%cx@?3!ScbGlm0Ulg56AaK9RXrXjg6VzT7iM{}{5T#2I-=%{@j65cF++gpPG3 zRZ&=j^1ySasT&|soNjPXPF{_Ko51@oiV6bMk^1oYKDUhUlS%Em8@4*@Kb)^V3txv) z2Xcj;QSvKB%30q&T7}(Is{H}CuAtn1Ki+%;8L1J_a8vi^=w3Zz{dS5IVQOx zKDEcq@1fRKmh_&;d}_R286Xz?6u84smA3TL{d4aQC|c)^&hWo;|1CRbwuPX<=Fi&u zx262SVR1o0u=#qmyZVt%(EgKtQ(R$tg=(%}IjBVK-mz`3e!|Nr;r=7}2jfL`&dbh) zmhSfw89}Vylg1{ZA_b*)-zu;wGnncrl%M0>^*UgAMsYZBIn8b_>&xh?}_H8G+?GD8i+dsjZLkL4f_D|%* zf_~h_6wDOBejGmOoBRCA{?-|O)-9NCr|@%CF7!bq``rus+}hIx z4m4$>`d+HQAFREQvAMf|mBstdFAFq8HzCCWr2yH8#lqZ}+Y)biF#N%QLZmC2+HN2A zhtDZIDYU6o7(eLK&%>)^+@j?ICkDe@^$xE@kvY48&k zgPf?4{!7U(rve*BB7zuhLOe>5(n)#g8weJkdpa>UJkAF76PR z>@v|MDAnCX@*sQ-aBwGjpln91L)c;nbcSy<4hQhmVfRE*8N;gIHv z<{D`Q{_E>tH8DlQ%j5GAb^jS#G9b#9prcwYzcrqV0zCe-buRDt*pRvfq6dc>cP*jDUNX_9SxYofcB6&L9_j>5r3vnBK*Ed9QG z);Xkn_QK+HKU6R0T;?Whs)ed#C4=>&vPs=-yF3awKXw z28yPkrk>D-by~3op@K64VebxE#U4d`Et2HSM%CncfBwtC@&b9O-+KL0+@FE#8GS*n z;>@tDwz)eU^9RVz{!+5j!%rQ6>v7Y3AxFr&DQ7o(x$Eb`#Mt=jTM_6qiSuW+sFWT# z#Y8p7Nly>a5jeP}*mxdv^duaWow{TaJDJ6r2pO!N!P( zJnNFeCMv{fF()~ET6$tz*ac?QvLu34%E8+h9HDUFh z{DnAJfkD(^rrnX#0r;2)@0)gkL$KGfhTI}~gl!;-*%xnH`&B#Uw?=FOxkL7a#{;PffBJWl z%m-|!*_WO|m#Me+Ei_O0O_7pVr__e#cEe{RxRANq+&n81x)x_?1CLB&;@|fGUA|UB z#WhbSIRvSQ=+rN!>jZi%i55i-nZHU4jk~M=K`6uYg_hS@MVpFv;D&9Y%rm%-(13Y= zCBLoF_&1-1q(LH4y~<|()?DwD3rgG=5MJzOefwO9zi$R&+5T&&Bd4#f73iHpLWA)Q9u%z7PvcD80ldmM7KUTkODhkU-Rq_pWD7zi-Y$-w;U` zz7s1c;Q8vNN@o?rE#DbPexTK3-}`pKT-g^jIj_qqZp1=FF`t`It18=hh6vOBm6a#M zLgNmF{wKu9Jhqw2Z~to%fVncV67hw=FO{DEN$=u7MvENJ2wJ({?NjFZIu4vpp1}rR z$PWcioRD&UdF@`i-D6)e$N{7DuZs>&y(_Y--Zd|$RXSVytLFQLM`MH}Ieg(&xx!&i}QBt^FWe;%F|5s__wVOap0(-iSM(v_*5_ zk)^FFAont5(yHZTZ%dBC@`%OhUG`>*)(>%!b*A^*dMdp7T@%ckzXuXIq!&YWF^p2w zN?MWo-z#2QCGb}+5N+9 z0-&79t^*40y^U$7{0ru%!q6+tLnLyS;svYi(ai9mI>uNakCX+$jLj z=TzaF_m{rB9Ig91V%GubN}P#n*^aj{>(N*Qd^L!?uc! 
z{`fpHE&UZpW;s8;0St_P7kBy`^SFJ@BXMm7Kx*UVPaX}zP--sw*(dI!hr;e|yotaf-47cF(Sx5KYw8gQwVRu79P1Y11Fi!Ee~G|+p+VBM ze;EYeC>##=sIBhe-^nuSnZPD41pV9U{A2KcxX?cVvHyeq{t6WV9{7LI-)}wtTW$Ui z-TgcJ0fYY!(A~=vj`Y*P$4%0o;u@llno1eJ`p~4o8J)Dri1X3FpJY@^iSYmSC%@hP zg;U&%H%s*l6ozP%^KCT;1a+Cd{^_BCo00FCX8`)%=bejRm_BM{e1`|ouL{@d{XInddJ?@jD@c_TPb&rCMs>l-}Ha?rmSGmd3@!}>DVxzyIB7H zIyqHk65Ky_0ZU&?m)7L{pBw_?N&(G)Zxx32e)zeKBqax{rT(e_(fjraPZ}6@d)Ts{ zh$691O@Dh)=Esr%&V*4V0hU z`8>sBs9pX=*|CFM$DNbJ;_Qx5xt@!>4!yerec$$YR_{*E!sx-^?}@u@9+@yjpPpz` z8Z@1CF+Svg8AY8KTgXW~u)mBaTD|ms{`KaT;Gt{6UYJosiM7M~Ef#)$yS}}2KI7`t zn}_I6sHX;qEW((G1*86)&_t?H&gjJ0rO`J(`FVEM>`doWm5%Kq&bGMu@t{T&(0dx^ z7k^^?R^!qpNXyN_i8?J~SVcssOhQir15@P7J;)S$APMNv4g_LUtKw_&|JLniSdJ-P zd+Dnc8S2`<-bK%%a+c|*M66~H1vfAAd_Q@5zLRa%G8NJ-ZSXL6>ZW#D@QsEEz1`Q9 z5*3qI)$c^_UJJ}vOsVY}(PBmwNFvH)Zd@`+ZzV$fn`Q0HeC}Qpry9*#81HhaI6xO) z9i3jiH7HTlbu5l@e{WOVk(bwM$!7YsRNLqDfu&Q|vsRN@McS~zp(mvwE%i$I@6s5b z>f}4AuVk)@kGs!n(lR2#MRt*l9fUN1D9yX*F5XGaJ;`rLCl_wm&5oSb#_ zY~1SO^{@n?MeOqYe9*VuapK0l-Mpfw3WEl6>z|+H4}0l1p(E%ugF*E4EX4QJlGq|8eJVp|!iyDS=6(mb|;nA7wdc z@xa3e5*M34$_Y>>9VSSY`GTEyjf1)m+@^MTm{f=mH_~2pW>GAT-_(lK=Zls*XFM1$ zaCN!$@mkWf<%jTJ*E7(G8VpW*;}pkK^{Y8VQ35KZc5IGtNNrY3eu%%3p5^$%M*qA$ zi&xNlv%)~k{U-DedZ7@RlMnn<&wWCxk3Zm$_HsI&^7`Rraeh47Zhi&e_4$7&HXO3&qVCx&Jyn1 z^h0rtPiYSGB#3@y0 z_O168uceC{#y?`?SSksh7uTO9V1!cUe8^;5nf&ah(x=)n)3AJn zV9BgK5`Gn~UtVx@cv!UWik;H=bQUch1I*~Ito+wizY(6_IM0PmoF{8ct@Bga{%iK@ z9mbTa34X~B2rm^*W%23}D&(K4C26WST!r$7-YY&`w7#eA^Vhd84>(SIT{;(ce@P@i zKU~8gB9CT$$Gb*%qU=g%uHk{|dljLbuV%)wt$tFB2L;5UEosu$lYI3xa{Ew{z^n61!@>=hGd1)_rAt_ljqIvrJPQh zODW`ci4f(G=OR|zTgIu%%SsgQqx5&=mri~l^f6ZMQFH^0#&k>7SqIWm-!*+K%+7e4 z%+8!Hceo|*y6MM;0dCQ}T*Qsk{~ah2-P*eT>IL}iFh96%8`>9dY+ohc=R1TN^qe4~ z3)CTb?mM;~gEf#7aT=rYE9FRbPHq-T$j0ng^t1+SvMIdZEPI&FUiq5+Rm9XrRpz?e zq%e0=ERV;~c$X@v&c5|a|E2B&qt>BDKZXS+m6>H@PYw;Xplh#1rlnv*4AD|e8qsg= zOZHs`&bbS~73>t{U;inm{Q6;?=oR`lw4P{*Y(w_Gu|h~ zRWh<49R4!GBN3O=MN8~`Gydr?`|WT=b7PlGlw{K|Ud=`|-&+HJQE#c{Ne->$8`eL| z_1kVUonQ~W^z*TRB)>LEvT@j(;~zl{RVwq~nbhg;?X)~}`O!)ILbNWeB<$|w^-DKP z87wdE=)+QyNg^>bH!A!6Hu?`Hj9xTLp0?KoA;sv_k9&7+^_;~mIL#9-!Xp}1frDrs z{07a}?{?=Bm`WQsKGs}zu=LCGJ^5h+tYFmD%F%@YPJG7piFL^}*R^Sd9UbQmkazms zsU}Q~&nY`+HFZLUr_y^GBq&Oiq7zw-R?O@MtSmS7G2-hDkpe%ByU2Ln=D; zaR8AwgRkV=#>i~pZp4Y4W6vYNzC<@=)7ic6AB`Ospj@M$mD`O!aCwRp%1y)^w`{uZrjBnmHnmp>ON zG%BviJ=^5Kp@PyiMyACBzx&cJpz*|00qq!_AJp=J#~b?LJKhIKlGj2c>$!&&Q`-dN z+|Csd&Yh6sQx4-fS^AYMh>CJLl*L#Y2;~_5!%%>BmS$)%4-)Y|GRi-iOe{D=PX}&> zk_mg0X7g5Y$M?Q48*#cX_#$zX+nIY_91$jQe)d)QIr{W>Nx#Rtn?GVAMt!8suRyEP z;KDoSb?ms=vd5H$1*82?%Ua(=07=I|sIA(54f&7Y{P*FR zGkZZ$@+K+c#ycHC`+>2OUZvQ)GvipBB2 zlcYZ#>xElfwoilXzRwOT0Sq$s#xau?06C6=>I41L%RN~D_EJe#2=U3FNfz}HDc(3P0zTsi2f^0d^x_)49fEyJQ1Ib zP1?0ySP7a6FaRr)@}C*lMAbGsX8?92J_c$mwwLrLcm}Kwd6P0b3W@!K{6}?&3 zXuK|#1W^C`wX?%dY2Cqfp`6(OdNnJyKYbjRxuy8(Ef9;)_oA43{}k9wLeE7!10Wv> z!1Jn%al^hUM$T9i2e%+9L5`3G&_1&~{PD`G+P}U}-vE{CmX)P1E}+y-pJ+SMT7`|5 zl>ki`Nr{R!m;%cZR#iO=D7@x}Z@7Quu;;cpp~Ilx=9-wUJ$rAR;4xzbP~J`g9RVm% z0g~L;*ODOb(s2i&8oYjZFm zxe5yEg8TMmG4GXN)78jkShL>vE1S%TGf9;(&E;yg|Q zD0lwj3m!tmVSB7BHY%>t~Y+cw?=%NGv|H$f7W%2Glm@zxUIAiAOe_${Mari{-1e zQ6%4Z87ukeFi880Z_ISs9`jz9ac1W?I%x$iQFGH4$(EKoG~}mNB3V-0tH>>mB!{4% z+1X~o+OawttBlGC#QZ+*QJcf6q|2{Bc_nSz{t?g(q_F3(+T7dYYxf*71mowzgH{&F zRjhA!T6&7Aq;-SWZ9^|!Ot4W^t@*SQ1{fyI7=JZ&X=@b6YSfm&xV7mk0j@f>kkbtb zPYe`74k|e`-_2=PJQj6G(IeM>S9|)?_+#x3x!+Btt~`%CBvP_{ysyT1OF9ZPaU2X& z5IG(2Ry7?o%b-DXNx}#7ER;Y9*C^)4;rdZ={)7ZT4}^LM@>tC|Ieaev@!LS_jBD3dJi+s?9@g#o87qn0rA;KiWyUw_7>S~Bk;rHtn{u)wyc9%gQ3eMWFqV9$qsGg1#0t*8@7_P3|Xoat#V!I z-#>Q#%-Ols)AMVgPw2B95S|z@>x7tezxh)fZ=G9s50(6gI@XfLR+I|*`dMJtzVy{i 
[git binary patch data omitted: base85-encoded image bytes for the added PNG assets; no readable content]
z#K9AdoEs(Xf2XTM*zalm7M}bBY?8KeO#RA+5%lz4*3%(8*Kw;tQGtiXVDv5rX1{fn z(`Q=0dP$MF-EIN<4sneC@v-db7$Bf@%W52DLJwUg!1RNX2jhkkzX1{<(B zNI8(I57=tz?gz!h3*3|fs%5alNU4G|%t_kT042;Ez^=MewZi`G54RVti@t^=RoCxe zp1SKff0}Fa`j`HgfGw=*ID3ngHb375*@2<<|}1&KZti=Hk6MIR;z`TMXEM z`cp?z(NG8_3O4Mw_@SvSPzv*WEeGT%|h9%eo)Aa+3bJg(|=owrWKk~z9M`sN&wA-TA!SC(<)kOzT zt$sPU2^~Q@OzYj$DOHdm_H-6LX*_Vr^0)Vap;SP!FAF7zDG2pAfVCUBQAiv{Er44T zNapvxro9BFKOlu8f9e5iCKu*!#evL4vi+w0z>1@d{u@^?+J!g?VNy zbSjYc`SNLOZuPqxc9fXQ|(n050|N|pM9GDzq~qhI}WX2egSwY(o;VwP*UeQe0D2`vy6p79As5Ym7XFK{FWe&@O*g>}oBp6Q_c z@AN;}W(L>h=ws`rc(22B%qRsK=|>LfZ$ERP(Lc6v==Y^OP(S>E4)iTEX!>mPDyCHp!y{nHqsJXB zeKu8wj73tas~KRDAO}p1(8(^4Ztam&7WBZ*A7?zkBft%K%)>oX9iHSI{ zGh;vIlOgSdmkSu3RDF2uPjZ>Ujji;HH-z6x%58PBcfab7uPKPe|4e8>e3?wm9^1%C zM*BE`CNk+B4cg|K_8=@JU$Z5>AHJzii|zjPW}G43k`8gdpMMT-dE?sQ9YT%GUyQ@n z;o1P~>!z7ftIMo9QBeQ@`}Mfu!3))iXEfxeqfcna zYp5m&3)-5|^9Uz2&_O;7qYKs!$DJBS*Q+ z8fh|Cc($YYNfeKP{1>HUdW(-1eg}8B-tx;h{EQ(0&7V3I`RZ`EtTR$)q5tTeyv}1M2xq;Yv=^V1jhUT z)5s6CmZ=t^HC!!9di1C!c7%l)GN{-444UJe8um6v@Z%${7e3i&_usYJHDn3Xh7(ne zX717AM}nFibRx`-gTqKwjmz^!xaQk#PNqqnhHonL!UQt?)(CW|wW);U@vxj41dpU0+ZQ|L|c6uer)7^VFuRBA(hvH2Ua zNI+fJa5?APFEJB>9${tfgVnJ`BaJ-8W3C9XVCR|KGp}3edSe7+Y9R8C=DrLbSi46} zQg&$dk(Ya>nYOU1Qx{9=0o~lpCE^xr^Q~h>a>bgBjr=|z+9untIsN;+mI?$1VH=-H zJXtx24m2x(bHPe(f;D8_?tvh^tkhGa-tyUh?p)xF5TuZ@1YP<_} z8MGI249n@P$Wu~Q1|*ghqOYm;7Ts=hPjRE$qP<_QpFYa;^DeZi83(V?od)-eK27Kz znctoeqUWxh$nOPGffWs%tzA=^m8nwkcw5SZqRCL?Yz(zlT5U46g7Oo_nqgM)=qjKm zONG>CJp-X+;oF-8tTp)Z6Q3rvPbRs{f#wmkQ%{-s4*9+!YpSMX3PerV+PA}hEGYIs zmRm&iicHuzGtQM=#%%Fe=>^Mvx`a#1$;7Q=CL#_*ZmVw>eUwoa#j4Wuz(Lk@?6Y*Q zG3P5KAkq-eJ6{ckTu0j9^@Xz{)a%8ule_{Fgz!2-4R^RYcswMy?Z3(z+#pT^M897I*jgbep$ zSvMfOT1gM2Dq?J1LptbN9&M0hKHkxY%qg_rkN&{Rt+1h>i|5g39j<)nfkPrcxAqw` zBE%CYw~2Y-DM;EqCd4T4-QIAJ5!&I*AZ8J2t=Sg zS#T9zDvYEqdE%`LgzF0xBC)ln_Z7>~_1dr_P~%NMucnEnk9axES$y$)R!Slh7Skln z>Nc;gQ#Q%v@>WE4<`aIp(}Jw2;6+sH=b_v9W4P4mdo`i0NT5jf*feK-Ho&l%G0&gElI@$u|<{uK6$m*VKLa z0@Fq>ZW&6Vy}RIwwIbOA5mJ%!xL93>DWX#`R|3go-cQ%xrZX`uY`^!G@i+CPtVx## z?h$*I_M zO3Kk97|a=kA`MVm;Qd@DN2Sb*Vw%1~#P|EpmGt_h&9792DHdc35g~#obdU60b_kE$ z(*0UPbI}loKTjL^gtqyR_&qt<&hI8oyqO7jN$7IwcKGVG^=l#*35oa-J=pBp(31ec&7*^a@+_scC7i(6u;63ATZe{pa(OCu~48feCp+$Da>b}_16 zJl7_EW#d{ac4TE)f#C7?Dk!i%V|*cYdPP+=ut~2sCBhe-2if`IuXZG%;0tm-y1Qst(3TVIJnR zINQ@%1sZK?02}h;^9;$?6Zsq=<0@GKjXK$KV*kZUfQBE=_fITQx@$uhPNpL*-hxQg zNTG{TwIU2jRWy>8C07y^MEL-nzcjZrxf3#usdbYB$`ZAufYHCO%^4PEqgfYcO&9{X<7yJLDXTR?6)PE(L7bT4PJdHPgxDgpVs+Jj%bSGgzC5leuQn|E3 zR~=hH^eT|lCuB~Z5-6q^ep1lW%Xo!-92j$kgj>I9Q&q>ck~f;tvB=>*PCpEpC`{B7 zNg#9!rZh!YH1*(TYIXj~!LTsWsWTFKyvb{dEL78;8Y?@jIK z>OPxEBf<4&CoVK>w6~xYxZM|ZL#NJ?^$mbE3jv3LPT9gOr3XUc^N6kx)*EFhBjfZW zoF0&ko`kX%v0tMeMj3}*%EG?k!=zzC(O@k*DUGtV zNb;f&Wy0@_4sd2bWod#yG~K3QGBf(j9GsyrdJFD~rBCOM#g*AmGqQXd7Y%iBCLJ)J9ExI&92--4hA8VDpm|7*{EQEhAl-9IR#PZwuQt z>AqE|f9s@t<$_L&ELNgaCF^A^t_#C26mqcfMIQFjwGao;hR{|Vzy|exfv9*HY_4x& zpz-IS#ZWLE{HU;+s98%3HIM3y$R*B7-#Gj0s*d8R%Q?sVmaxS@m_uEt;~%bicO*9j zyD4q^50gU~cEJ;jo`F|ExaEwL$J)wVWlqmVD&_q%L>5|5 zSQ$>3*Y2Kz8ZXc$7pRoY8#-_~ZIlYfo3mS8bhVi4P0=U7L3oJtP+AwAAT&lun9Mr~ zLdv!fqlynMh|FObHVZ`olrB-cF>6ItI3~pLP=&c~$a&k_DHsVy8oESGBcvri)+YkO z40VJp?-qBNZ+?GL@G!sP5kL`?*+FHKV=-<@#&@oM(pM}MBLDopU~6HuQfQ>n%4wk)F0Y~{r{i}WO!~v?Q0WjV zzNuJ>c~HnjO+^CIzc*q^C1U1Iy^)>^dy@!O$Qqylyp*#_kirL1Tu7Y5C#?BCP1fDl z#W%{Y78n&5$SG>6l#gJ`wTj?VIiY9n6<6U43j4gqG?{o6s>KXsF5_`KePn%r0g-$$ zAktL2TN?*57PDEAUK_5Ckz{8cCyH-l87l=;lT}q;$ZHRxWLn;cdmyLt^Ejr7^1)&R zEBAr)g>WS#s_1T?la=54+m|`xr3>;ci~HwtV>T@l96~#@iTMtZywfvtyb8s%8uQPP z-`8c%YQpNQHFcPl(Qk)#U)#WKIe6+LXrzD?#Lus?kv%0nBygczAXdHqfo~g*QIx{f zw6$K|gdc`&ZAduTyv!rseBIrAf3Vkhy6UF#J{&FN 
zqNA?Cf(#^UF??ro>-c7^hS!R^PUO02_G{M6621{nRG15AzqeUs>7<2|+_w81}-b8{^WzTj^ zNN(=JJEOKi;QT!^w?K%}W1thBR?Tl90>E3$v72;l>!EF5EpV&|+x>jSSy7D#D!o(y zL~09aBBmnGiz`~XtHoDNA&66<2Xt(W(u0tFIqF{Or$2#nvo8#=le#DOiA&tnI3Cc+ zP`NmY5ogA12vlBJRvi%^QE(Ps$Ct7hHXS|37J8#efw}d?5pEl;KJwjU^h|8fvi}%Z zo6J+xJ^4wy);ET91t?hbXzIk=>79s-BkKx-gmcs6bECk6c zrFgK!uPQ_<{ve&@O8ol|VIUSG9BcK|!W44>`xs$vG_ny?km20eNVQ#Pxw+!Z_oW#} zsHwqhViwn1LzHXN`FgWmX&2teA4px;A;<3UKRvQcjFEw9mAuBffPMU?3oK0HE~-#G zGg^5+iR?|xK6%F_%Z=V3R!ONX@t6Dt^osLzBH|I2j3SQ+hdg&|*ESR%BYOZ4C_rD6 z@Gr(b-=>LYH@uguBpW%>V7d?H8cK>lVP7wWw0M zPJXi6|Dv`Ij9coa6uvaqQ%lkmPs(pZwTdzfyG}^Q4iYLdmmCp@%HX`z!8vJeJ+~cv z@ImrSw9FsN6ckY5Z%rzX@XJW_+H@jfO&feXl_P(G zqtEic`aJtU$c}s4RwMhbf0P_vD8mY93){2DqWu|6%-#9ywwZiHd-qvE@+bX7k_pq& zL10AA&9=Sle>z9X`z dynlX^B+jO%vq;TcJOlqzSJ6@~QM?uS{{f literal 0 HcmV?d00001 diff --git a/v0.20.3/img/target_transformer2.drawio b/v0.20.3/img/target_transformer2.drawio new file mode 100644 index 000000000..0c53afbd7 --- /dev/null +++ b/v0.20.3/img/target_transformer2.drawio @@ -0,0 +1 @@ +7V1bc5s4GP01ntl9cIf75TF107Qzu93OZHeS3ZeODLKhxYgVOLH76ysZgZERBtuYYJu8GImrpHOOvosgI3WyWD1gEHl/IhcGI0VyVyP1w0hRZFkyyA+tWac1Fi3Rijn2XXbQtuLR/wlZpcRql74LY+7ABKEg8SO+0kFhCJ2EqwMYo1f+sBkK+LtGYA5LFY8OCMq1T76beFm7DHu74xP0516Stc9MdyxAdjBrSewBF70WqtT7kTrBCCXp1mI1gQHtvKxf0vM+VuzNHwzDMGlywr27/PFFnfy3ugs+e/jbRHma3Y8NNhovIFiyFo8UIyAXfD9D5LqKMafbWd2UtiNZs84x/l+i7MhxvBm6O3KAoker7c7dS6SXlWbA4S/yCQYvMPEdUDz12JstgOP5IcEN7RPuvg4KEOYuOFJUafPXxo3jBGGY3ZKMBd+J+cNMcUW3nPnxAghwCN0UN2ABE4jjyqetq57WHqpwT6okcEXrvWQRkAp5010Y/YAT1ugQhZC2xA+CnSoQ+POQFB2CdUjq379ATLES3LEdC9916W3ev3p+Ah+jFF2vRJNIHUbL0IWUBhLrqI9g4QdUkSZoiX1yRUX6Al/Zzkk+BqTnDUOS8rYUycb4Rx8ErgpVjHwPEJHuxWtySLZXYkLAlNC2WPl1qyumxuq8gqTkJwKmZfP82lu6kw3G+APYL1eSP6JYDEAccxiL5BoQLgCe+2GKQIkCMO07KQVngmHieOneEOEFCPIDAsLWcdbo4v4qGYkjEAofYQqcH/PNiI8Zmejl8Hz6m6LrGwpONvTgtn+v1at6wlkiwq09kNQzKW3MjtSWJIIcGFVyi4w2mRKrGFCgHIijdJ6c+SvKCcbBbHJTmjDkkd1XMAcdQQtT4XhBJtd3eokZtlkmRlbXOi/UK+SFEK7sLm1x5qkG09eJYNVSeQArZWG3BLpunUvWtSuEb79k/edxmn4b+FcFhk2n+LfLfesSp44VEU48NEchCO63tTs24vaYPxCKWL9/h0myZh4qWCaIHxW48pPnwva/9FJkIktLH1bsypvCOiuEpLnPxULhLFrcnrYpZeedNOB7jNmYnMjcMnHPsnk5IXoAk3rDknb7XgRhGIDEf+F97dbxoF+hHjaTKd76vGUz05R570uRzbJIGQKRMs4ESmsQqTOJlNpQpPReiVSGzwEQrQPCaAgIrVeAMG5l1mrZCV3fpBGu89Pbm9vgcnVs8TTXK8LQ9Z3qoNplxpyLQleKQWsT294ngAegxKwPVYhC0MrZYFIdajsNJuQXLOhAhNOY/vgh6SVCbDINYBDGM6Jge03gAVSNQWVLO6CS3hpU1QGw00A1QKdl6OS42DNtdQsdsw46TTO0zRLcRwYlJRE6l7EfzmsCk6fFE3ZFtd7Sav8ZaGpfrbzxFbKtvQy0plm8UNtltuV1Rbqp56KbUjv9959uVYw4YDHF6YxQBkYcwwjT7B0jam2X/jOiywlIqgT+wLg+Mk6WdY5xmvbWjNOq00OeXEGQdjlYD0NVuJrvaxoK8VFYP+0Um3KZAD0TINUdF0QWeK+6KQCk0QIg/3lI7KX78D36Op5Nno3w85PhjZXSCHWQCCiHLyv7thhvFzag9ewvO/Ur8jfsy9cd7KT0dkckDfizs3YGJX+M48fpGldPdhGgfx4C9N0uEhOi9wo8r+sPdMiDkdmGkdmpWydkW3Uyd4hI9zoi/eYZMrXazDhZ47J3usRe7VSApL0itB1RuX5+PmI6PpcHYPBDrtvl1w+EY96GByAec2UwLY8yLW/zBQTZ5gFsqd3ZlmL8DosbT1Sv+kWOTVc5qq1746dBY3i3aljWdry06VZ5bu5W2q5xWebwctXFEMB+87m9eoXMGfyR4kqPW/VHLFH0olN/RLv4N+qUjgw6Z4lfcsSdYN0pTa07s1fWXRbkPggoXJ/1CjVm/92Apu+29AwoWrUZ1fOk+98Y+GF9/mFIuTdPuWuCCe5sKXfxu1a12bDzfq+qZPs0gf5sZrX0WagF/Wxb8WNZFaA+kIhtfENrm8/w1hHE40O+WyUyGK8hFXKu5cE76ylsga8t/ECVfTZaGr0xJzKb4SBzQh5dVlDR0JpaE/0KKhrVi2anQh3akHAcpyxsEjppW4dV4cIGsi/t+m9pXvcgidshSuyBiG5GGDmQxqTqHOBtEOivZUJjS6PjHOOCOG46Z6Y4TklJyR7XmBq60Y50GbthQkEKT9X1Li0Kc5CuTqWr6adpeuYIGdWrjw/TnONSFRU6VDJH1TsM55joCMIHReSa2V03pVWmar7jlydZllVSq1yZOLXST0ehcHlSpzFd+SAEXWdM18ze8+kipisc8iFF3+KUJOxhuzwj7SNf9xPSvqe+qvzmsKq9q/SkKZd1rdNl7Ud9Rmuwu48Wuew7Pxemcns+T3RpEYMIQ2Y3c7b5EDDYHzDgTXCRPaZq7QQMSHH7Xz7Sl8+2/ytFvf8F \ No newline at end of file diff --git 
diff --git a/v0.20.3/img/target_transformer2.png b/v0.20.3/img/target_transformer2.png
new file mode 100644
index 0000000000000000000000000000000000000000..fa4c4bd05a19241da6e2ef6ce5ca6ceec5bfa736
GIT binary patch
literal 125500
[... base85-encoded binary image data for v0.20.3/img/target_transformer2.png ...]
zoY+^`p?-YT9K%$oa3fWJzS%vRd3U8unQco5U@hifzYb#*_0-Wp?y&}Is@jT51r zTn9jeXu+jRVF=X4a3h{#c2KmYaEqUFQWpJ86<5A+ct6u#1ViZ|6)Ta434HJH6}$#z z7@p!-!)_Gd%`w-~KokR%M6HK8sGoFze!8Lgw{N<^|Lm3F9Z94U-rOVq_(H2E4!zA> zwbdb*KjggYH=cDj?-qmqA%h$up?OwbK^8OM3J46jqyO_}>w_^=`KD2EihO^+YZOoS zWl1n6P9pvCnB#{MX2>In)<_G0?*$am7FGwK(m)@INPFLB7D=-TRZg}BJ#aav>NW@C zXJ0$>!$Z^LZl`SWXhmpYt_g1y2v9ZHT|x{S_tLqEw(SgRu*y ze!`rKupWfl9E|BxK`V2^VtShnD1aEPCFbf9Ci2wU-KYo+TslqUX+j~gqbh= zqR(;H0|o`ZLALiB^iN6!W`r>~mzF~TL}BMHV>QRt)jKcWwi$r$9dTSI{=ZZ_H!!Cf z*FDUR`aiRVw0~?bboYXNKSuS7smy~4CPYgipKzeaI0C9mNno&M3=xPCctqz*BQiGd zLy<=1Wdo!n07vfDKp|W!q7qmKtEn$u3NO3bxpEGD(r7yxOh{S97&JiKdoWZB-6|M0etR5X4J6&Wjm zId)1oc&}yvoisa@o$?9TjXeP}nU$ATfxbTw1VjApBAuR|PI=jnxt7Gb?hY0fk53Y> zNB%_}qf7X_)c{Iu$}g)xfD6OysAzKE&0Ls1Z{|+Zq$779(WB*pBC8oh7^n8ADbvrY zSl}t8yPX{7ON@e#Tsl-hnp~fr|HXO-04@yq4p=(fxb!Y(h_Ub-Bx^-VfQ1?A`Wbjd zE8kkGl-N9iOJ+YA5sO{pfgj7WOr*d*hCd7wa*^d~PoC6Z51b~e7ihc9cEl`wpu`aQ zN?PsPcUDm)&wztuvp8668U>WR<;Wox7-1`l+)kjcD#e@wo}Ken*Cd8AL9gQU1V~{} zfG=})OYLxm$B#zcQ&BNtw*lE|BJh3qGOSI)WXriLMuFX(A)kckP#-~BdWJ{MmIF!A zX{(*LQODdYlFaDA_%s%}X=Hgz5V~uWPO`w$B{a&#&z$YYL??DeENuz!bh;Qm-UwkZ znAf!JRg(4Dy*dolxTRSu9r-96DRdJGh|I!8Z@L(6o*<>Rv4Fp=0-~`PA`D-%*c2r` zMrpwCR%{qJcZ=?f$*ykH+;1-LadkVM0dicdYc{RNHiiK_k0%3B^wA~NM@TM>4N_@zv z+Es%gc)D9gE97toVTn05?%f$-eEXIiR-Xp{ZPJ-naEV-c7K@U0|C8)n8%h+0Ri6kK zh?@HmYHV2KwxNnPMs+UN9VslU!#+Epz`9~~j$QdZ!euYp-WqP_7>P7__llu zndBVTPMfB09%TT2Z&jOr#gz70Mi6hsW5_2!QDbx6pYZ;wt*xl}e0zQJX87_&xmnm|43PaE_>7phXK;IBJ^&$u_S&_eR(;^53HmG%$C1~ z2BMgg-w$N`!uSzEh%yhdey)|Mj>}@#ggowb3Ms(nVN?K$-yi#|%TA_#A#f=%G9=P<`QJj5`*>ha>_C%R5*EhxDZf68b%{ zIuk||0f8nRf}`@8aCJo>iG4Wh3bvPjD;M~VgjC^T`Lk1lg z**80Jr^gaNmnB@R$GYK6LThgR=}S1tzdsZkiCNwLH&MnBVC$d-rvco-?N-W{X!)&f z%wOr?o4UZPm<|TBTj8a!LKdJ>qyi2ZHW0;v0O#3Ifk6Opl#+mwhEaxNYzW^UvS^g% znvjK7m8}dJ=O{rxi>jZGtNuB8H#FdPC#El4^?!@=7>Lo|$T_u^Gua>$RGJl$2-;Qw z&j}FbbOL+`R8b@7)I=hNp~3(As9bY_CBB};ht$I=erNcXRtT0l=vYJyl%P#q6UNKK zpeQ(tj}1)lZ%L+WRSB>MYVdf6{!4B8rzCP*2KJ|U2es0_M3?{d#!@73z)}gH`#=2E zC@{}AXDgrI|943SMllk|4eyrNb??sq{Hy={vHu#}KmPQ;!}#wSqyJqX|GPl`U!EXE ziExGgxAQVa^b}5W0q`9K9ZMq6`ZOV$R)jt=K>BBb_(~HRu+D zn@_*QgSO~=o(4p4M#w}Tj_Uo_7&Q-=2!8xx3LI=T=(>iXZv9JXnSfDRj#gEm_nSn; zhk2R*DXU>lN8L1{jO%denw)f!z4aDPH>^<`M)1zpulX# zFi&y-HxOgbeP-o2>}>QkgV#i0b$jD4D+sJpMdj+ zGGx~=yd|njT?yV>rRo@qML8LVM?>n9%XuZ>qb$e_#$3w4sp|v7BU&w78qa^ckeDw- zW=R7d$`7oTS4?-$7y0>JU>3{2SSo0@JU<)!-Y5!0mj%zEg+zr?nUx-9o-mRdgT)lN ze*|RMuQ851>{5sPX|>?h543B}xi7Hr4coqOSvnf4SEl?JC9nrun?-1zXZL?6yexZ+ z5ngJ_Z!#KxT%8N97zgcT{>GPl0ERg)o1#taXIl3$W z$-U~_-Q`2{VSkO&zf_piS2p`u6e(L*9KfS)XTAQCp!Y>ufu1mG^Eum#Wv*lI@-QwW zV|0IsGT&58R`L-J{9FtW+LWAS0Zon)lv)+RZ=WyV=^oy-%ZQh-Xfd@}jM378=}|Nh z6*pe)4TAl1Nk}Cs<~)M2-;lWVL*l0RXX3`Rup!48U_IaR_DkSA?ZQb~KyMSDog~^)gs^Cx9`CR9d1mncUW2MrlZXV1D+);e}jLF^=>zF-+ zfc#b}^hHaD+-sFB&xW1gBe|>B4SkLRX~Mm>a^Eoa8YvHJIlxzh=50? zaYt!^{q0l#;SO@4)|WH|Ze`bNpY*4)-EFU4^E-%tVq_OI-h>396! zu(4%tzWgSSF++W{yww_u@Tt)TOv$|CBg}y|#H;&=)2=fU%2^s+?$p40U80m6S{#~o-aBRfDIhGC9F=NWmNPydV zw(XbTxZfAN<^QJEd|iiN#%s{570Cg3$NFbprzKoGQzAFd>Eh>lU$H+2DLqEjmfp_N zKg9!)eM$RA@1+gVU8QUd{w$3$c{p;{*LxqU9D<-Sx``X3J>SZ`7;or9@dY~gkSq?O zhDCsh+dxr^8JQw6NQ$CMY6j;@K}Ym_uihsJj@~C`KPe%xH-pF9cV6ms{8JA&@5}xI z2sD7LP4(P$+|q2tWUqNSq&wL2mNqM<20&2{tDxCvJ~K@%zz>YRzr}%QHz( zqAmwqic2Ku93Nu<$Nl{a;DyvhI55|DN;$hlU8Tcq0PLtg&34S-V7A&?mQ!cLxB=AN zx_Ll#OEQ1FVYeL9=?S!&Hs-s(FseFR=mR)6@cnG;j^uUToJ_W=pX>3+9E_L9w6^I|6UR{7hUjs92-<#t@Loq_;WPGFmS zJ$uI!Rsot87|~3E`G*iAHK1%DNKua9ag3#=VK;}${kbn;J5#? 
z>Y=81l2|YGN7Z9ae(z5qR&u=_^9~h$XmCsCINr20WaQc|2LfOJtj?an;()iYnn;wB zM_{hZ9-Eu1ufeV|1>(8C3ptbLcjqN%G2x04267xMNRh~?6VxO5W4K8@mSMb`E12O; z4LX0pLZ*ig-!Db7W4Xcnu6m|ogO^(d2%#}ICg7F!imr*>QnNiN&*=Y_X{Sg(Ybm@R!aKZ(^%eZJPko{>=_7F;{B^4NZbwG&y z_m6Fae^iEZYImSUuUMReg?VOh=FyA5p4lWGr@c@1@7YG~=)N}!&!SY?aVA>nYKju= z^Ij6|sqCCHZ0|RYc0&uU*OVt7GCkjEmg2|Ew7m*EaFZ zJ(3k4(B)JXP;~K*xlGx-%5dutJ#`tZmMtduH3Uvty0$~7hw4!J*l8enmI_x-7g0>k z*5Te-4j|oI%9SpxUiNoAG>_z68BH8D-|nm1 zA$$}o5l>2N5Q|H~G9yJ=at?~a;?hlUz~6vLzVYLp$q*jEkfzB3b`(S9w#LAOQ~9gB zuhVaNRT$7^%x)ocoCZKMp$5H^)3FDgB$s<4s(Q23RlsNT$yLTibRhIDOa)gusIP=U zA(G_i>^v7(qxeKkL?qPA=cjOXx0tfOzw;eGlrHulm(KlYL*qGq=xgU1C|dO03mye^ zVb|mf5W~NNk_Cmn4^`DI*lII_(()@{xv4BJ?!{v^oC>)AjY?lIZ_oe;vV&{&_+YEI z($RWu3Cst6gZo;@)YYTNtSbm{Kr}-zF0SClfpK(cX1Q+q6Ws5j(mJ*tUOILWYg}WC zb=2VYKw_k8ww&4#gA$(vZgab@9Jh(WIPqU3riGA^Smo&@eTCI8R!B-fjm6-I!wC_? zXT1%{6tbK8=o1Q>n87d1n6#K?cm{XD6*`f*p2wSf5S67iROwg%{lceQ7Qi&NABRHY z^7aYFFqJ~f;niJ42Tk|ilp{=4Q!1;`_&xN8`{Ag;MZ=4gG|;(Pf?KV$a8Eh|d_KO$ z&o-ON58Q=JHhc zO59=uV@;~ojFZ4gPq)%v3V~=)-ZN`I#(hl1GFTIVDPuqi@`RfRcMi@I-wjR6?OerF z3qQdyQkj~r=n(apyk&L^zc+`%ZcmXQnY>!LJU!1T|Bc}wD(6l1^`4J;FXSz zH*%J_{p#-gMBd3K^$cRTZp>Z$OJ4LySw8$0>m%P#O|C&N-@}lR4|v3{Zap9TDWbz& zoVV6`T0SOEbri#K8-BDiN#YBB@QjhbBh;ijXNv#Zz`&8p~t4(Hz0j|@PYQE`YxxCJD-}0 zp?{#z_5HbR(Rq~|`W7+{1N}NhIbX9Y{yW}%I@Wdi^)zKkS++BM^E(Jm?;p?QtgeQ2 zlhBfdK9-E+HVl>z5&+^#-vJHH%zDO&mrod|aaE9!D; z4-sEA&H&Xp4L&2%&{(rGJqMS6MY3nf3_7>+mfNZX+ZsQ9dx;JEL+IHSUYywc<5Px< z4NPVH$JCca`+q(BPBF3Sb46^X#N+Noo8cAz(34tvg|n~RPlT?Wj)d5%TAlgUiA9`) zl``pZJW)%inTfLTDIjaoUDFgj<4kdum*b4R`NB=94!TQc8H^TUFFM}LXm!u7c~V) z6O{d}Ou635oa338`519=>6GF~)TL&{xnlRQe<>i(+^4Qq^E<%ZE8gkXjnLve)8q zk}?cDD09A^TPVl+rNFnMV>PdGZIyA3o}QRJ-L8-WugwpMFB;BR$gn<6w;tz@4&dL! znl#`cdA?A~_HyLSc(%`gUOS4R^PG(M5;iBLQNU6Q<;z#k#y1zX2Sv3mzai66uOuO! zFX^hSVW;VwpjPmBt<*kO(K2Y&U*P=~XQZvMd0WW%9%0~Ior2x)kHMJEf+^K}fe;wo zvsV201J}hK_yl}Zz@ENY1YzHI07hD}jH>DO*2W|to&v+lom1BJUKyL>Qt%GKfi|kj zW@#Zg{LzdrRm1e9Uwn_BrcpUr=2xGK@7B=ijVn;55=C8Qcc!N9D2W}VBDBA4^Q&#` zoSQ`Z$4J9jB6X1qj8ozQx{q#3*&=J|+uhf#6y<_EIn=%5@soV=#P4Tq zFKNF?A2opxN~2^qPOmoS^J$b1jpN(cn#Iq4Y5W|-{IAu|?5zp>lx(OtJ;4^DjI~2w zS~*K>8BG~BhNX2-Tgt!~EwdMU^$$Di#;GuCrgEtPw)4|5Ga%S!ae#5ytnoHKujLLt=1CU zcyVrVWIW}|WiuqH#z1pFO$W(u71poWg{0vuEc~5H%95gqHP24{skD3LK9?>u1#3wM zva54&f9a`RkEK!lv_$KDVz8SqR~g%Kg!7(&K`(Oy_xmGO?>m+wkEh$hNjg7>yf^qH z^y%X};+Bo#a|T55=)^-WR}u4Fei0*GH1#Phv)#C77w4B1R*6-k&~y(PRqK~?J~8`K zzv!>lbPT-F{yau|*mI1FBmF=sJ=NaMr-pYOB(Tt1*hKjdmZ42m}9`AJR}0 ze5w4i86g@!O>AF@5h5Z&JXB|!?79r|cGxKRm2Wh4${yn<(nz+-H~4)I^LRMpvDNf8 zF~%_Cnbzr*kEuEu>`gh-SNjhVY8!3XR^L5*TSxYmap`Q5DACJxh0m0({o?jE(xTJ9 z6g`k)FZ9NTRL8gJ#)tT=eu2<~pa-l_5s_H${`(9f?xTC%`=tF74 zT4U3B&m^E>On^!D+vAs1v?~M8(`p$F1z#)VJ=V^%KKnP~_Ge&8@+L$6tF_fYFQYv$>Zt z)M*rOAUMmu=JTS+d_Y7mW@djmqI11D|5<3slUuPx3Onm4YAFafVZw z7ROg<&v1M0nW@L|wRt7&&*nJjLwtvIvHnM_m0qsrNwn_}2;LPuni503823qO>ql>L zSy&Wp2roEq#~#!ud0k4WCKyI)zI=I=;lh17Q_^qKd{Nsd;_HvG87V~z&vj5a_p&za z2ypc5VBe+f`&?}M;-yx5LUQS?llI7SZw_RJbhHi7=iS`;H7=wJi$zFNGSterH4ACy z5Mi+uBckTUnwAcA7rYHUg9Z85(8L3L9i`6>`nz`MP~D!e@us!6 zEKXcl+%h?>==?@W8cI{wAa!H>F3dPb0%4n{QRcCvq@)~`r*u;A0^X%d5-u)6f8en< zq)*=m8_{ekNIZTKOfSaDQWpMFAy4-xa3xjkJKHUsq;4HqMs7Y*c~3?C7nJql$?uRW z&Lvz+0jgIkb#ql4L+jdzHOZr&c-`D@Cy~mqF=_0!_*&pTS9r^G^WR$UNGOwdic@!$nMN_&KzGp z9*sSp_-O)hg^4F@A?3FUM(w3cPJ0F`eUqDvbWRz%t8TlWp?EdlPFe0XwBRqr8A*>H zl3+V%GF6J{Esc#sMARf64j(4KS(FZH>>zzeP5umAF!k_ zXnmpK8moEWR+#VgE;vdIc>#am8s;wYQo!y}{&wU04?@n$xAopKZj1(3o`hiIy+cV)TX9jX)qZ`Jr?K=uNbN1HcHRQWzuGkND zDyI$A8Z&k)M=z-8_rM9IEgjp304$xrDI$ie z3r%1{?g==F4uAk&dtepmkIDHM441!!Zg9U?`j$u${P<~b`uY3HNIfB|HB}4m_#R^| 
z&IS`-F6(mnJOm3`*D`TbvN4S?u=!Ry#bSYN2=To=qYMx5CH5VT#B- zFbOK6Lol$Zl7vr>y$9ogqc#bo$vBC?YCq$L)4FWFYp{%g`VS zeTIXKRYvCX!X+} zy^pl9=sC2Tjqd2StAQ0~)8cTm>TbCD+~d-ro%binQ&v%9Q3Yp+`))~yuatf&NYc(0 z>g)glh2>Jj{kfTV=n*vM%a@D|G{<-X`gb+kTpyUpe!|#64_iXaen4=3frffRWb#v=6YFZA+_+ zh>Y}wHpk=3gAx}noPF}rXyA8?IabwGPm2&=HvW93?-btF*xS8Q#UE}|-6ueFSl*Ah zEz$H4*K0B}bm!cQqaSKLx7B%NE+xH>qig8C{w!tTo6Xb`HJ|N9sH%1n*;=+(2!YH6 z)N#o1rJq@}-Vlmj9W~ZWy8n8p=?ksuQcZ5n;6*bM^=FSGs5IA0xvTC|%5QMHG+I2> z$`Q=ofL63}DSoTFH_Ea&I*)$R4{{E*J%{!A(R2yGq4H(ZXa_%HZ>SH80QTv;nvqdNAx*O#bO>%PClS)@dB*tg`; z@XDMe`b7GL9g!iSC-&;GZoh6@m>Mjh^#`@nUMP+Gr!90#jS?WA+Tz^bl*LbG3c3cL=Lit{qHkZZJ*ls-_aWRNDG zGF4h#Z+%V;toa5o+{gtpeZ_QL%SYY`;#fAZipPwLK#}5TR??%l-Dj#q$%N$hBEoi|IyuXS*X)HtLlx#Brms z>NkPJ{6&T#xh@#*DXm~64HJVz1Ri!}{C4MEF|$@Di%Y4$u~K_FL?omqp42?dcJGX# zFSQ;SIBn`tYt8YP?DbpwIB&E0Y5>wpRZ2=hN)ZGlq&t*GI?g?puDySIx%N5Vzh{hZ48|I;gm=#Q z%;&lDx~{t=OWiT&(8-Qs?hcBUK1AoGUW96lU+P%mM{oS#A1B)4w)c#riHCfSQ7o01 zN^ee7?~IqV`d&@P=)puwej)9zZmL=2?u}|7)v#kF)H>T1i+ANd;f_t1$A)d9UAulk zRd$D>WV2eQ1O&&l%B~}CgtYxmxB5voTwbxsUa6jD`{anfQr%ZG$8C2@xFm1mXE%k( z;ra1puB?vv)CW=KHHg*F-^#y_R}=F-gGX+^tq}*{YE$2ig1# zv^wEhfZe?@Z7imR#2H+nQ|RJ5PK{ldu^n_Dh+-LV^GURNanWDjy||^;Dr||xBwT+^ zUyOLfXMXt=LwSFk(i7XS7^Om%JZY2*JFizE)iG_2q-Ncd)4aWGStwKPeEWFy5dHcy zKSSYx#@5~SM+rD>v1}xPTr`1E4h+;0Oq|B&i5uLVhu=-;raWgEhoQjfJjXYolg5V4 zBqSz-!Nsb7tpvAA@TFh(sfC!=OtsmS10v6z{f;7GWkxGB>NBFrVRX;ybBG=S`}>{#~4hF zU;Q-oVjx2CpzLYM_%o?FrrGbqcUq}aMz+L6W2}dqAU+IYD2nuj)FgJt`id$oCntWM zdU^|tkkA03?Yw@A+i!s*{u#Q%)}yt9A61}?yta^+KG`Yc((pcpdDwd|;g~>HLnD%2 zQL!QXW1`J*Mk3KMltOFi>FH??h)H&va`Ub{z6mS;ISx&o-z?1BQ1(DTjw3m{ zAIV*GQ0%om3UTr+DtZatt*fD&L?P{VYJv2EHmQ%KALHm3{fJ`EveqxZHB@F}eqZU1 z#>%(4PE@49K)v1}_Uw_RW7u1IIil#%{GVq%zx?o%@o>IwmArMgN1gm_!JV9^35Sv+ zM8SgmLnd!OVVn0g_xbK&uHHH%h}$Me5%x9jO=A11u@$&V(dVAfLlW9_8TAQnb?KdM zA1>Cbj8}6kCQW)yoL}!_@Hi_S-{K`RDxdAvjU6DixyWm8!=ZLa%$a6wy2~`Ci{k331Qzr_XSUE|35CG&fjgohSPEVKgOe4;Egw ztnWi&8iG7L0o&p84F~I|&vEMc0suj{PC|p6)3AyVY+~T50lNZL=igWWKs02SuA+xY zV{0D{W~vsC)CV>YAHQ4MXpNIl$$XoM+eNe76K+FZVl4M=5KDeF+RUkEPO+JR$hYGh z_v6BK$!}CiEK<_#Zfy8>rIjLby|sr7@fWvF6Y6w3z|T+Hf3EMJ!(hYl`w}BnF`a?q6N+0Ae_KgX7O?`s$-!zZn&2n;VBq24cwS^D05{U|~+ zY2xIkA&Z4|#@40fvDe-olBaVIzBh8voEUu6Q!~je>iOZq#E&glJiF5dFIiddgwb4& z@fC-C{)1?8&JuV2*rEaCzt4;IO;&nL--+>EMUc=vG>jr5N!enGQ0K#$=n=qkPk^Y5 z08_2i3knPGX7)Tx#!SbLs54}gX`gJCmGN^1#NRkvDq0<#(_(X3!-_07?k6<#Jk##L zMiiC(j_G-1b@1)Q#H1-=Nv7gv_mQ>wpzil1*YG0K6hGlF{A8$mBzF))`L%o&E4$;e zI;%$Mt7Y~33=Aoq){ZN$##ep@z8RY141_$qovp{Fq z3(4d~f2&sK#?$EHjO%Y!C($b9Eyh!haX9^!PvNmUb0Z?G{NbB$MzpH!=o>czK;FCq z{~VQ)680{Ddgn}95^B|t=UqO)uG*e*Q5G9FEWb3E^&+fMugK1rCiyWOi#03*OKEVa z=C=RC=$HY{C+%@o__YTYX%%|Pr|o+ZLUZ@64B2H=uGcES?``WjHAZlIZZuMQ?^?up7AapT786w9Pmq_)O3qTB=|#3I})$b!L!sqQkQO*-V)+Q zIgNd}$zx|Friq%b(s29v=2~DwOAgu5W-kL4E2{?vmGSx)aN{r4-^yNHGr8Gm~g$0@BWK7eQ3MiwDAS-M?9cg1Nja*68)K9X$ zOma8eQmRaGf8z^Zlkt?+Y2&>K64+&44^xv*62fg^#9F%rdM@-ZO@D}2=k++)afk&y zm(aoc1$ck=J*FPg?RI4pz*k zvygnUXKahPGdCtWy0$b+u#!-sCpNkAjR%hpn3vWh(J0?Hh}Ra1{jruMCAYu3Ed}IC7lF9f;U*&8tc^emsL=V;}>9s#hT)-DM_h=IZO}0CrW^+2cw?fx#Z;J#(}?j zp?v1afvqF>$pjPh8YmX?u00S+kPR`Ly@$mr;Y zpyxXUh)KO#cV6IO6#tAAn32S0Qq6h%Dcbh}MqLJ@&s7D5m8T25nfl^cF-bf$Aeu`w0y ztU4ywsRDLF>*JS7zF1#i0i*YvNkE%2V#Mg;@0 z4`2%7Y@itb1e(g8Ljk^H>ORA(Fy#)>bw#D&U_|;p#ckVO0-bI5ux~g@SsBxq^TW}w z9$CnGkn)7boM2^LqqC>PI3!quX%p|DXQv6>`O3k*(9G%S+d2h0c#hLH04kUVn({Tn zZ4OD$IrW327gS>@p{=t0V&FVJ-i|27Zau_z4Ac8n0Ixi{SX?lfm62WG46@?17(#@w zRZa#ANTU|uyXMxWBCP2k_d^1Vb&HHMyAv$j+-gMp(33fLcj5)}C^YWrjO)EZCG*r$ zWnjFV?i3|89R_i!s#Lu(w1taRB3GtrJvNBr|KFBh-JhFG1p1Kg7kcyJ7OC=k@M7uK zK&JKLty{MS>b*r}WMwg#oON3U0W5g{NQCh+A>G^{wJ2X*)ZUcs`k|=ZsF>C|Qrj~+ 
z`Dsbf7fVbHUPUXB)1wpMcG((fPN@4g06iH%E5^KBt>YkGG8bZ_b4S~s@o=re*yoT8 z2BB!j`S{JNczM;*+jd0Wptz}Tad;c2$bPJ z#zKK_%Q>MYA-jDFx%OKs_vAKJJDBdMjRGcD&4iT?BT$9$omWP0;;Ggze|!BxGBCeA zVY5>HPJ1-=<8xfko@cjn_AE%z(wasa&f2exinndsI6FJj`WMxlXJgIhW^xY3QlxzZ zum27qZG4RHU%iT8_pnAlyFh+E&zj-H@XhCD z%lAug@J#_75&GuH^`LVzVF-w7p~x^!4okc%l0W0ndyU_}+W9u8wX*cex7uh;#&DAQ9Ir- z>4{H_Qm7PXrGa-$9p!#7u(wr!>)%ykF1hx1#ZGM z_Fd(yWdKXEq7w11FzWO`{ zUgQb99Ul0UqvRo}u&{?(wB{o?yvbWS2xz(IdcOU{JBd|BGlLE;S4Tz$*yp^>X-XHS z79M|fB-;f$YrnI4%kYhBCpovhIx((03dW;dK?86Fs8jS;u3Q=Sc#iFhi>897fv+?s z+PKz2jdf158`~G*nxkgatD@~ocEBTj2isf|ScC|z)bd?HT$%lZ46tNzU~VH#C@I^+ zfdReI+1$A4&+71M{_#}_FKGH&ih%hOU!9%JAA=+u*9tXqU7?!?()luPA7U?Y{T!SF z0_^)pI$4B0`aTHW12r!%FYcK$KNHo56R_ZX{Nu|@362ZT0?z6vFkasW(L8ug*cQX` z#8Lj$X1B&rWSS@-IvN4_LE7JJw}AOx8hO5deEF&P!g^YawU&WG0u4-Bdk^N6OdWC< zQcWfH1TS$|zgz)(3CHQ+Ni_5#*8tO6#CEXgZjxCQ1HJBh!x$Y zpoo9@Oy3*uky^^k^Fe;T!zYC-K2d#4mmCX21N77J{i|Sc02PVBz~lvi*MO`x?Mf2| zf1T{;;srJ)F&w~?nXuZmb)A;whJP$4BcURkd2FqU%jc}_-D8It&i-(uIKIjPv+nX^ zTDRq4R^0;9&~sd&k&%R0I5-T~iw)aSg!z}p8epcW?KmvHqLPxdsp+#Gf(0;l=7Qzs zouM#+gr1%K6qiATT)o$>z}1jth%Ns4V7}YP(n~lk&!aGiVgTq948PB+D-UyaE^i7u zwxn!Yz=#F<>~Hy?dIo4HLBJzVhT(mJAmE?7)z23(zvDC*OQ^_Gx$v}fY;1UQpHj68 zqHITN^L1F1;91$Fiv*-x zw?icU<&oeQeE;vC|KFwim!0!}x6S|GkIW4wd&mu`L|yTfl$5TxEmKXe2-%G`gRVrB zu#@>Tz#refeaotmapK*(cZ3%&79XTO`ltUx+##(Q5@bP*<94miaC7$8Q^$S9XD5xp%(8_k{rW}8)4nT?H2QAMR0qOaI(6@?1x!S2#q zvU2kBnLrjVG+B3ysHXm77yAl=#qKd5J^1z=fcCe*qCzg0rk?`CB%}d|t_hQw#EuU( zLKCKc5NcZf@We_dwYN%82A+}Mi<@t-i$X}F%{%)gjr z-#+BNJx?As9s_aCWOM#A+K%x8Jrz6z&|;h$wbjLEUgJKYk&FO}zbVX{Ak0(>+`O>| z%%|=`eg2Z&x8eW1Rk(SnC3unf;<68ZjDaf8CFgmK;0#9Dh*Cf-5KJLZU!bW~2h z5j0FJ1n7+BmKJvIj&OSj*APDVjhnf~|8`HBNb(*U-=d1wwqyeVrHqzVG(D{n<}$*g zI@tr0kh%d49^6QJ?p!XeO6?gn{)McpKR?J83PtVVmn=I3{Qyp$37otRe}Jl+?QJex ztbX0uD;#M)0>vr?L&KEvuW;K)cC2ovy#MDXF=nvCE$PIaJ$9Z2KyY)0^rP7E?1hR= zrk=gX2kJeq3|f$c1(e#0g5-|>Ws5c5g1@J_`aYzXa0hrDt3bfhGan2 z5y3qp_RkxH`|T!$ydeo4zn2O#w%7m|AA!7`nU1(cw}H-!FbNRL!NDOoH1r*m6Nar* zXaDUmP$PHFK=5E9;Ar5wq9U&A%4m3Zw?;$oheX=wm>5l9s%B?rOSvNzn#7qD<$uQY z@JvZivr{6m)M|*qA-E_9v=lN0@?RzM{4q-c57xB?m^l~v6BFlb24plBzlE6p+Y1(g z8`%wfGq&^%h*zBYj2Oah&F&r$@m|EM5N!^Ufq}5#kp0Kb~P+0lBy)pEp_P@Zvcd8i4n; z%GiobbrW@O$A*T5>DYQB$^IW-K7gleH|AT&x&r0ECorln3z)qC8(NJbUmUqs5DPO1 zAHt1i0wrM4B{nVUUxH~8xUw+DW9?pI_qF%rWi~^CDybr5V29}GPooe+1Y}*ziB~S) zudV;NNog?}ka{Uk&wMg1jM@o;fIAZ^#foV3dN>KifbI?>3yT?2uQsSGJ}H*__|KJN zjFf;qERxdWOzA7sf0W2?@eImHeuR8xjDNg@RWrnJUa3yQF9O<uA(NM~j0;e3QZ*dw_eB?1TCH!**{FEUv9iJVYXoE((BR&XJs+1JXwe0G&H$$vE{0VT61$G9$BN>%xs*@H)U;ORJo`6HHJ3XXgAI zNrNummNt_>JTuq_U}$3iMRZ1YMOH?}e(Q(+i~C@~sC^FBg4Xqslk{^@_r(U#%)=nT z_xx%%sM1^td#&mE{(W&1Tg5*%#dsxhmTc~clj}jb4-8ti4j#goa-!5P<9rx~%ikBM zqbjeUz(MocX6T&c;bzQN4@iGoK#iwvvC`Ugx{U}b%t*2aRVkyfbjp7V4BmiEcO-8= zQRei!>~GltYBiTj6b@G>WNUx+86-&h)R((KCeI6AQ|#v#?Y4BuhPO0{+ba#nlQ0@@ z{@ZH~IInqLTTo}1%6cNPUNg3LCSWL>QYaD-EaT%Q=@32qi=ConfXBPzZ*-z~r0%{D zh+>5Sw;!R|O3=}gUG>ydWPf_Ja(tw|ChL!I>^Y2;d2*o2#byi>J}<&t*z5I(L{m4g zC(?Jgk^xDhNmL1`egH&#yB`T`q4fSou73fn3>(8H%6@5m=ELV#mfGoC7*NPg)5>9# z_32HadK;7FzDGn#>JM-)SqfK+j@b6q`d}0Kz}d>< z$*#j z13LyNHBhoVLmy*gE#kgPOH52$=t)~S37E~ZFlcYx@?bdSPlG85J8%7om?zh%>eNDj zNK4ess6Kq{>a60)e6!Y_C_g`+6>Jj)v*ghZ1ISHGUB1>QxW36{M|=!jjWQSr+uYj9 z;gcrf(lL(qLt!JcD&_SMNep#;`+s?~C1e+l4DU8?K%p!Pe8t~LHWHfKy@M|;WScMa z-tx?b3ED};<9*qYZoFCuhFU+5P2A=w4?cLF+5V!gO%vn+pWN7 zbR6}}8^|%8QZ`b@5&FYVb|Ozd_+__c?RP^a^7{|qOwUrXoKXB4E|A(sAiV54@eccT zXOgama>N7i-RT(fBj{9|hNap5YYpBwOa(e}@?5zxkXIj^kxd>icem%XvTPu}G-RP( zii6iAqXxOKRscuJAL}Jc9IN@up<^S)HLJ?y?ID*HjUQJ4dA0E?LLzIHa5&3rA zc3`9uH~OED-?2jPcT5+*)o;~KEdIu@ 
z?z-X96iLcv8-T661r=u2YlR5~hdECF!!-QmDMpdr=$qO@(@H47(m|QG>i#x29C?qK zT1(VTyK zk#PeQfx}T7{a8&S@`iO3Xnpd9bmmB){NRIBpvL%rOzcdzB~#6qKB4}e@mDYZ5_C#m zS@nNv8e9$Ag{d19K!U#b8$IrBPIBY#B7P8_ShJt54OLoa2y96|DO!>a!96xY)Sa;Z zO7(3-NB}?zS)3^pM&|oV(gh!_`u4vwg8+a~0%T!U8_$mNR@;tD^jw~ZI39gD z_r5-Fd+jcvfEr4Ie!OUfW<&ytGO^~il;cQxv4c4tu^P{*{}9?FtdgXRjEor|m$ZE8 zg(YRr#~Tmyh}TR(=i)Bt(U?yOyZ$&2J(-Zw(o2lY%=ZK%Aa}cUu(O6{@Hh*b@F!v9 zqU!D)Ifxy;PLYah3d3#aRJ2Gv+4CHK!b7&7Hrkpc-F*dbC)l^L-p6?#T%2Z7wT&;p zD@ex)eBZnymsav;TBziQXv;0}YhfzL@S4B?*HFk?qx*t_1GG9BI{S0ltNL1ki5Oys zaDU1sJ%%1V5!5JeoDMjO_BAF1ZXWU0^2lQdDJUkRR(c`4+?Dsjf7c7o>XCG5DaKb6 z36F8)ztLj2z=gAy5AL?SLDFwn-OvWWvq?b|CouKq4K+~r=|C>CC?Rg>GY}xYFLTw} zs?gAfSNd^(uT=rFz`hOZs^wBB7AVAt3tWx<1vm>X~e z{;o7Y#twe&8KA~)IEderR0yXC$dnCw2C?zZ;r3V*$8~Pi=A06h4rpXOING%kuW83c^twW&U)i*tm}@P!*&46BTu+y4CnPgs4^@JYZG}?eR~d!{OlGqu?oY_hCI{uU_^hED(W}2eZX}FFho$_WB79Z3=v|4r?UX!65UweUW zWt{r-`IODVwQV|?=p*+bILx6?AK1Oq2HL015ZgnoJ7T4N|LoTn_qS@yuhSXTdkum{ zsOPIvi|7}O)e%^=%7D>Oj%CfXW*=*Hq&tCwmklp9L>(LNzndabElB=wUb47C+5{eE zzON{GJzqes&u8gLBXHFHAXMYXR2p_{N^XdNEbkdKeh13$oCc}@UjB_9A=>NOA03Ca zBB;@1aXQ*aU-oX0uh&?|lHJ}^@3*y-G|wlndsfP3)fmS`;5`<vTKB?9XWjM-ySha z?sTS!Qz9UZRz#!LADQYfwA7V~i-uTJH)#u!{gSEqN?9 zuMi?NjpjoBSt-aS*t+?ao7`u%gsNSS0K>ql^PCW-g@-{QdbgWluEGcm5Teu!HAW=m zFDolUYE@2G58SuFyy3s=0q7cj6jZz&DGi~J4I)~P@bN+*^X-a!FhC4s+TFEBE!_tQ z3amlBj1mQ)$iJXT7ZC?H6r39X^e{a=R^ZN)tBYRX{`No}Pq6 z?K}u-(LZgA9BMK(HRWRrdoHLaB+M-IqTLFjlz|$T7l~gX%SX&-$Jg=qkIAneeQ9u< zZoAcw=^YonAg7FQ&2NUKNTCYgfGEAr*NifQYIiJtjST8ng;yGNgoKdCb9!+^i*^&l zbL-RXSpsUy;1w6W;#!92)!I7E`5-4gy^!8KaRq~js2>Vn06fz5fL!SFtZBX>$y-)d zR$Pe2)i=OWQ$mha3gOcW!OeF)07zbaPj2k1T20+WViEl}?BL4}94Pfz1l33VM#(Xb zAA`Br^@-4>ocP`PfO?kP8ZSM)3{hP6!}fkNNVEuf7a(a4s-1|1mrz?-!6Bq z@Q@h;hO2xxnjzonx+?7`j<@3rOH3oxKm3$5Rv?~Fgenp)gMItk1!rGm z@0s9TH>;v&g-r02sN1s6Fo#w)?r)d|>sR(s3Kq4LMyR_XNT{5yn6S1s#hI#Pz{!l# z*&iQy8$ul50ZzDc_4Uz8&*SeflkVlB3XCv`Gasq68v|0HThIHOij7EDr0sV~jeoCPxRm34C`>f&RDoYCgElyPuz7Pn%y+A3nv>S~lHM z0k75!kY0h_FLYnz3XI$K7pLCa=_aDauo92ml!*>j(n@WsfTFxHX4L5@CaD}%a-)R`Id4nMMDGf675 zHmzwRj^;0jnF`q7$*GJRs2@+QC+SI!ap5bS4c!82$FQtVL)zm(R~I}i8$eDf-z zEDYqN99#{mMHbqxN{-p8$7uBfvw-NmjRi{rdBxBqih#?<#k@m{t8Goetmh{-x9Ax= z$9QQ#d0^7MV?yBj>62v5ueN%-CwI@O4b7sQVCi<`1`zuo!SIO|V+=vQ?o)MR60Zut zK1pL%*}up@OQr%cN3vR4#M85-Kz!rSRqFX744P@^*U`aeA?)|XRr(8yvaTm6W~Yah z1FZ$&SXubnJQ+t)JUCA_&Gp;i+s8O&ACq`p`@KiDtZEIu-+Z%i zSbZCb2Odr{_w`7Sa14x_9x+DhO zoA?^rFy>=#OcZx7{KcJtN4n?>_m7ZfxUorLZU*Uj5A)a?u%1faFJvDx{`c!vltNnO z)sna7iqm9|S~nN~0C;jrXnD913Zqe?_icj&V_0JDqD;M{VO^<_CD!oev7PcIP zRD7&zZYHOgr24*m7)|A41Qa0XS}SH_0G>F+0ic9GWRaig9kfq))M7|g^8~R3Jh!cPJW^E?N9UQRC!)Ld)Ux^Jo&=7 z;ays@c%b*4G#&ic#XS^s9x|(43qIW?Mv`p~0=>7`S>=U7TJ~hWmkHl`c?(^XXO;O| zE+)Qlq*t^o%eB1rKQe07Mv!?w2N6>2}+i zl@W~c+b=~stsin&ak^FckXPL&X}eX3@A3&x^ySU9&vBRghTZLH8fC}J@Qr4l35xca zx{&Zg1mV4B8HS~U!2Q^JW%6Ks_z-xy)n*_!b*ScxtJM|f8*?#|R~?%_1pds`6ytJQksa6wxZC^ukBU#WTy_w&-kI2e5VY2i+CFYvV4sQ-D@y3=45HDieGKr`SjGz z`Fk|qSN!L4$@sJj2*{LmDglpNPhZbrASGvvfA_w@MhE|)WObxe%; z?o_u>eYi?bC83FWejuoki(&F&v2P@0N%8_5-xG{#bQs}CGPZ`*1Bcr06>>DTbUqG> z;H1vFBI-0PU4ktgqa0_x8>rib2fR5aa(iVPqan*&u2aA%=r`f5oGvC?WMORLIH}VO zq*Ua8OQ~eP8B<@A%v}0dacxeZXn^$beJ>ZSC zd}!N9E$X!yRhfvbc#$*c%@0FiOwJ#8h7zd~-&;?kicC)sEpY~k3X3K^kRhjtH9}cFG3l6P47V($6#dUZX#pyuQpc z@VC094ET{>Z7`|GqpE(PS#Z+vcBfw|i!1w?veptxy99>Tm9yTSZ670YG8lDrnkWdG zdIQp~S>qcE58H>S6~vl~Hs_w!kP{*rD-1tu+Rn0`C@*I6eL;$P`Y@V{PHDDWgiV8n zBK$OZ$O6+KH;H9sMnY=(xJP|HtYtP%;!}3RsK)rL{eq6Bx=%UvBe@I&XP6`w8Y;ZhX9p>vtS1kkCa4b-3-}}n z)>WF%!Ju-X`}=?r0YyHs%vvN^x3iMmGn@k2{%w<|9$muDH>~Rlngy&JU#Uchrb?bN zRGCY#(7@HGLfPLI&)EEqfxk5~(CcR7tKhv3eH8Z_|5KZ0V{T^9T&nI#_)2Oh&V!11 
z(fCT-RcCKnE4A@2uCxx#rGMW3sPFfm8Z=m{ z7q0o>xM#lfs9i2MQ^h@1l<|T0;t5ADKK=gQ?n`o5Kgv!Lj{R(+PDwquj&QshucTrL zAkpo~n5*&dI(xwPvYDv|=V!d~ZgbV)(j?=-?*WBp-xpn#g`n34qbic| z_wpT^!ozEeLQMO-o|+Hu$84Jc&9`)`@kUeZwK#)Y(%aYVOBS5zO+&AC*O?Wwtv6Oe`J_B7e6Z!zG{woQ>qyEf)#haM41#w!wY0U# znD_c&d`MA^`}Xq9;TjiZkC9pr{?2{^xsRp%w<87sFc-0oUa{K!NGXez<1=-G!@%;I zMh7davb2;xXi&^k+{+VUuDkod8O18OZXu zQ}}df+8LFl7iLG2+rADBND1D$jY5-g&S;sWor`VG=a28AbvAkawqb;3rDa1t-K~90 z%J_CsP;@Z5&p(Hl2|W(2WxoiO%WKLsf-7M`SE_DYFH@Ap6Y-PvoE@kcQBEhd>TFC^ zN*Lbh^Ww8-BT6`YP7_lowA6Lma;1;VWHBq0ZTI5^>GmOq#J&2`pq~u)pRUCfoT(M$ zvk8cQV03i}n#-(NQdp~7PZlhG%(?k$i#j_^Z&gYd6mZX z-rX<=QNx+S5ZL{%3?*yR{I-rpjn@X>LrGjgYr49prlKwEXR1|O=XJBzK5LCcow8CG zEr=)%-62`)nCnY&HE4$@drot+p`Bzm6f^bQg6G!3z~`zD z-;F7r4C*g)Rd%TsntS#Ce(DD+XG?8PmVU?C?<}3!sui+Wz@qsOr74SRQ!h_Q zg)--H_HlI4|$}tLm zg@wtvK|OEP2__dJ^LjTmYSw<9NvUz~C@kp-b-SLS6u=zFb*yxRGu0Qt|Awu|C9W^n zlE%#ScG2;>BXglDRcmpgoA;dGP7TrsIT^F31(uwTX#dW_v2xtEE%b#gYd?Gk@};P` zL$Sc!9+lc}+Fv8}J;TK)9JPl(7}r_v?Ehrzk26@>WLRFga!`7R^M%8Jz~)7Tyqe_| zE57Ur_c3g5x{XZ+7a8`4P8Z}a_wy6GbJ*aaJ&AiVhII5yv7rn^9@gRYF8iilFV;F*wH!7NEV?M0xeh6WMObtduiF{FpNMS=q+Hlc zfASwf9s5eZggOdBjrP`=ekhk$QR6gboP4w@|Cq;q`{<_F;EK|tF37c3b&m1~iAkRo zN_wo_PCCqCs}jJOgMdI>TFLIKjvldJzL`QyC|-ZXEIUF;n~u)NK4`M}{PE2i>qw{d zr?b6TdgWVx$)a=?fk>v;P`9Dint#Uhr>f?@+DgAz%o@GI_e?9s!k8budu#EwaE4rj zW*34)e<*4@5zTBeh(Acqo43XILQsueT%>f&-unX@DO(|PUXX@ULd3rI82~P;qEJFG zRL&Q8`V>?y=;%XrhC##a0h8zJ)q4rOK0;Q(e`5h0)F^s?k2L=(dgMAzGVrL5u~xGMT~Vwl+y3i3^h+H>IIyW z%5mM|cLzz$T^9Ql?$uV(Db2!KdEdEFf#NQ#k+^lOn`Xj-swZ6^SnzH0(w1Xt~(N^S$fZs&JK zCkNY%(fyU|*;KUDi(MD79#EQ%)*95_+r6wYJYAJ3dS%E5-DzGb)2?KaqK2XXAhOaU zrK}IoYg|C8A>T;o<3u|1E(=*brGCnzlE1Y8!m9j!xz37~IVGQ+ZgtU3l9-JJw-HAr z9L(NkHSO&aKL5&HBk2^wnp#0v`JMJao7Rd8DRFh#e9zsMM$IwWxU7u}k5Ep>0=u0r z$Ja9Z$2Ny3|ODH73N#|oSn}7Q1=6xi+A6rX6#!<;^4{CuH2ViGFP1ZLgP#D z-RF6&&*reE%AO7Jk=@*}ez^Q>TN05b1C*?&E-;K2LcBdvbt4%|fSXl+v?b@jOn?%% z$V8OdhXTTx(>bx3 zzP`sc@e(;)@sGMJ@mAjNlk!LXst=!%h-w(zKZUI(N%7x&UBWlqGI)XI1c)oGO0ow$_YAZ^*D-WXUm>rpUz+CB5}(r^(KipuD_p{>o&;p#ma+WybR4dW?w7SDY5 zn!>EO(@s2Cw5|>KNMr@BfnsVe_u99Pf=$zy<1J@BT&$qPF2{IkF|4OQaU#CW2i;yU z_}n4&A%C`YWw?DXN=z~dDjs-xDQFZ0Qhd(}$r|#da=HR--00|NKw${a{D09sWRtOw zKHT3Wyl)?Q>zFa1Mi$cL=Z&Huk6kvzsn54DTIg9Kr+uEW)ZKLv_4KDxExjT)&+}c0 zRcj|oK^hb!c0OuC7gx#!wb>n^e(+0kgBpXMx+lYcJ#VYzRs9p4n@q`@`7Z&Za=w2{ zUxeGyS${Gno=b4X7{_S$MBBw!*T6SF=DAo3aReh?9^#7`op`>+t?|hnFYql}rBlW6 zO?E`|ttI58qSOa4MwY_&F{bzb&$%dH)TOX;K)z`bVU+K$G^7`o%1J97?q&xunmmS$ zAwju?ch^uO`b0@O&KB3ny$o^@_n6*!8my@0K%&E1 z6nwi=b%BFXP)@AI8yhs<)^)b2#d*)6Y( z4vO1loXtqx#|jbg$8AJtWXcs+NhjQk43QhhN-!M}WoT zg6W#>)^oy;!tZ;~i^`7bE`}TH$Z<0A?<`Vm{0lUWD^~eh)dT=Lk@sRfHdJg0Ff;&0 z^rcXr1mOHlpe9}ttn}$DQ5^L69|5@W7C^co_|8iV0c;|N5H_f?_v`njtYD+o4V@Ri zw3o(kA)I96 zFGU6ZNBH*Om{1288rnMm1#N}l?xbZ3lG&cm0fUa9-2l-%vTW<<$ak&SFa+s!0Qu+S zK2QA)J%aFXU81UL>0dJXY~qQvsf*5a;tYVEqF3NQn;oL1y*&>a*CY$a9+=nEYItO% zE)9uL4}@s{cRbgKjd)1k7N%t;M3*7#qWF!4a7IAH0xlaG`e)r2^Yez^=!^6r_|HGM zJF({7(EEW=Mwrn1`TD~JfW1v){lzyQL$~@<0l(+11;8HzAciM)w*K?y&!Y}0Z=i8p zg|uBNO*WlCbVDa*e5n6Kb?3)Xs2MFa;D~uq=7|gKRJ$QnC2qUrtEhSeH&6bhI!^{V z#+c_toLS{GqUjU{pbZiPlNfeWxgF2$@9AMU#;>N%rez=sshAxdzkOo{pi{VrZ0yU)CaQL0TiJvc8AZnAdX+3a@DtNvGh z_1g|}LApr1^B@K53uN-qo=#a8yWk#J$T$7}Dw#FsJ;FLbnpwwPW$g4Rm#xm#xGoj! 
z^Z?cn6m&)yDl0rh^ep=T7w*>l{ym#PXNoWhbV=7XD+(%j6b3eBdfq<5Au|T-O(>A- zSY@KUmWne=)XJF{LC1yM@*5q-7_>`4;^f<4;=^cHM|pM3M9`0r`XpgSK#H7fre2A# zjo5_3O7?suxo|(l$~yguBE(Y!9Cnk3~hKpNW=g!@VmVqV!WX zPz*&NC(VXp3BMU34Ub`J-y=MofItIgbH!d^BEzg;fSED1z&GL>$szB7L&ORD89hwYZ8dCVqe{50e1aw?Ez~0JIVNMT&jEm`0w!IIsl_@>FSUh5hUG!>@Q3XmKqyEck9vRvyh0+&iYOwamDQ}G zJMB#Rbr|W2ics0yvRyWRi|+#?+wOe1<|BJ|2u74JK!Y+8#03nQM$RgT`W)4Y3!L3e z1;_zO#K`S*XzFlct={hh$mAYr!xVs=B8l*-z#fL=VdKf6V7a0$DgR)=V~l|3YF?Wf zJ%?gZPt(z@15uuHm+`9VQg5d|herNzxn(bNJHYEJ-&}2WUA^iF{3TuEhW%yHe6XN; z+iyUyWqs`U=sMGy`GA3*I+>^f$`vIte5PRx3bs-BcvVP@m3Q)gxhB|+*nrdIXrMyf z26P^8E6&6P<)@sQ6W4&WqRD>*lu9&=2)F}gsHf7-_=1S@-GL`I;NG<=to6Tk0o>_^ zy)De)#d;Ti;z1sONv=^huT+Ap{lt*|7H0_9Tl6H}_=Hj;2kgn(cg^97w9nvwhR!dX zF8TDmv;+bqV*m;;L)RC--oT{-Hh3{D}-F*zIH@mK;R zKDalz*a2|M9&$xKr(;GZW9Ybx!0j&xw}+W4&Q~4ooBmuo za(V!z84aMnIBRjOGs61gcz$vtphn}jO4nU}M|nm#IPwMso;%{3+)}E@@E8#+mC}cF z1r|RIC=QUG>d<_Euj4WVvt7afP^_^KX3mh|gr?&aD5#e&@P<)nS}(3~jbzacQCI~uY@e5{*^6zN7Z^tmTHE0hxEJr_cnZxqY5JN$`j0B!sjE-0yfmN2@YB~w%ef8;_cq6oq!RM+C zXboorc^9PyX3R|PY#E@&_+X~DU#WjtH(VL{q6O6+teos6oF{51MnVb1_;eYTvXnu0 zT#K{Q)wi`kHj`ewQq8xl+3KtpjJ%+OW=kxf2v6?YW7svguT26Jui^Q0H5~dg?)%n} zzxf_s4E742sCIfPZ&tbJ5LbQY-ZB5I*m#vzInyHeAzZc@xzc%e0qV1vwzwWj%~i!y+lr@Lq|xiyTQ>^%W9*^>Kar2aAOE3VZ-|kkyyU(C99pAYGdh^V%t$ z?n;+jZ2hrv;%y#GQ#Uh0xcng1d-)GG<7a4kYdL?i>Q=aH^G#r( zJM+<(d~F3p$7*v*eh;J35{wO>%c737+B3y6;1aPByaMbl?z;5n-%I#wA%FcBAx5r= ztv;3l{Ma1;xgsO$76!_)jBT2Gpg~maWMSEz;n(<`g&1F{nM1dL5j;~A*r-#EZ2$Ig z$jQZMz#Xpj(C*1q<$&UUJP2(oL(l8*9?Mf#87*7JIkhY+H{D$KP~meaWp+a?th z#ATVLTd3wBh#GJokobeqB87#(yrg5uxNm;-Z~@mz2YDBMylsHDr03QTHaO3x_6yvB zOcM#aqa9`ev7|rP%SR@}{TTe$+k!-%?Nx2oS2I-Xg#=Z8l_j^2ht@r2m?_Gz-LY*+3 z_kCUWxW+igd7k6%*UMy#5Lg%dx-sD}WCVs!G_azx9k+PbvFT`uw@Bu?lJvu9?k)dP zq90oRktT_j^A*DeF4&t~nP%<^)RzVN|6Xf43FOT95qv+_E=9;$>Gq?p{3u5w;GX1s zCN6Hqs(`1Y1vq=mKz)(F8wMmW?3xc7>#)0{Ahdv6do}(QZZT$}tbdXQBQT?j-GP+x zXt}N0=ciV#vyrmq@S#P1Q!R!j@LDlIO|+e>8^kef939~sIhb15uz5(72 z5n%6hX%He3b-R*A$Rp& z`@C{9MF%OjvwpuUF76T~qo80xrty5IpDwEchO`grpovJ7_0|$lJNInq+Qj8xlrUQE zPvgtKEAgc+L6G`+E1aI{8`M@APzL&zY#p3sl*(O>8L6-@jC#N*OMEK&V-I?_?3T!Z(U9&R zmD;EBdqTNu4q}-o5f3-MB$i@I59bWz%!DrKLP|=4N{yUP#>gee6mm!znKm^ zO?}u4b30rQ+hX?W;rpy9!lPJ}X|&8t9{rz<_wT4_3eaTcP?CGS42_at%+5KLs(g6T zm1e|Q5#H}Hr5U#~+J-RGcrVoU?{b@n{~X0PFtU{fKtoWJy%lf@kC`lt&0gXag2(W( z%ejV#Y&QsIYH1Q!plnc#>NYLPt6@GoH~cr@8lBHOWItX1An5SYpwb}??GRcc)uJ2| z!w0$z*R`sJ#LwJtg62SIIQayVltzwR|Lj<`!g;9nT=$87iFq(ofZy)ZBxhWkY53Rq z{=Y8y%O!ZWUwD60ltxlakXmTT7dfvEMViZj^n#N}^ted5*{J&YM@T$|vpg_rpZwRO zotp5-{U5>`iUi(P!dK9Z&=JzH)#|JP2z9F@8@3I< zL6PbRlJ8c4ioT3K`iB#O35AQirMO^QSR}Ob1wD@|kz)n1*JMf`EqNFc!>K=zWmafZ zZ%E^H{3K-$XZHJ~CM0seT#^mHSYlNU44%I!n(2`MpHZ1UV+?HDX!w9_5Mwf-D)@OT z8}(X3JN-gEz*B*fLBg1SJ8aJ3ULcz`n$(BWb z38sa%_O}a#@Rl-jRbRyaz6@W<;XW4MnK*y#W5Hg4c`qDnc$T9-`>q_&-QdrwmhR1W zf7Eii`x!}W+R|PY(a8ckhBKdag9)(dkc>uQWVZ1W|ZYqPlW&b4a6{R z0S`*;wbF~!fY1njKS#2g20D8n=*+fA|8UJg&}AdFBmT~PsLaXk%uN5yuE87AGyw@y z$!FfL>i|>+YCQQ>4E$uRf_Q+qMIcHtb4o#4^A_XtY~fkMSQv8e;z^kp@fWk zJ7CG00r5AN6qw|1;IO)G8|d=%A1(*`FN55Rl+2_|y+FiwNlua(OsA`@PIMxz|7|j4gmf(vb~8 z!@vFOK39<4D|7v>Mw4Hl5;$ighjyFLCBwVLrdQUpRnwUOq{z+H=Ww(9)@I<8_}`lW zg@Cl*{#-@*Z5G0`OGJL9Cv(LhoaUadL(Ilw5{W26dwrOR*9X92&E1yqzo#@2V&{-h zLZ`qtN5>71Ad=$$F^1f7?<`zK26DbgSk0U?ld&P-Z;ty`-dvV|ZX{)kE z2>Qc2ut4Ed}|H_T&+>iu+yoz8Yl3v^yhW%;K z@|d^KUATxJL37W|??FnWi#w>296>wZk8E`8Eta2hbZXdrSD|?PUF#I}Z!W-D$=LWW zeNub*lIkee{*#Zg<>yK1o;mE0sS?tEExHeKj28(H$J1icvDSh#6p4pr&d`|4 zfQ)nNc)R;P}1Q`NaZ{MkuG)Bj9)~-7j`?X8K^{`m#VIlM&9OPF6g9N 
zJ%KJrPR+ZKqM|q$Jw!(xtcct)u^j}Isdb`y!k7S{%xiF8M$P1cjA&mBxr@Q6b^KrCdwj+~gZ@y5aGaS0iy8ddJo%R|-eDAAjArrx>?5SZiVKJlmXaIrw(9Fop}0Je?K(%kxPJ0cd{m zCaB&f*LZmZF2qo{&nQQJ`%R9-t+G`vWzfY8ybH01gx=-H9d5~Yu>seY%qDz=4 zLU%U0C3<)|x$n4jEPo@i+HWLFtpR;=*yhys(~Gj-?86?86~~U#6!MY6{RI77 zXCBqB^?ADZlFe^qIFvW`ytl!-L!<4QU^n)eUVT70lGgJ?yqtQNf#3dD%N5LxlIooRk86bqJId z4BQ<$*4;n1gn7Zpw(=SY?_p zyBCZAm|KD)QxWE@y^dkmR&v!QXPt~U>pQU&q=T3?0db^+2Q1r#j_;)hJd1g&5+yp^WrGy*6XVyI`xr5R=y7}l~uCZ@U80rm~Ch&Zq| zDDDlLCYuk%Wwr`Htu&YiG9%An$6PUxcuq5#V|=-)J}B3Lyn^v2O(NgiXbY8c9X(l~ zW=zQ7&F@E^JxbvxlJ>%=>L1CUTXTzFwBdwQ3_)}l0W_tHT1 zul+L0!q&z4^xqTuSj>JOLr(`bYlnZV5`M~jIQ(WWT$p)|rtU@MPTWLu-EX1YiAZOP z`jrwRWzntDug^l&HRnrzpO_VvCjHc+%daZg$?0u>rL8S@h%uK`XISbK$$Mqu3uCoi z|L4}BN6l^?3}*h6?ET63hmjF1IB%e21X2FG_D*^ZV6-ZGTr$C?|J2evZ#z1)2CIO6 z+Os!eU5HF{!+ig?9D-T)nY8H!)K%X}n|GgEK7x`N{pCF*pi)%l4K7g+YSzohinoS! z>Wc>_fACDQ8Vd#-5{*Rpp5V}O6LC7dTDLMipJrT9yFYikBe~3A$a)1#l#rN{$<@e- z()g&SiKD_xQbfWH?U9~C_nnC-K}C!HjsQujjK(5d!#h>!XN1+w#5Jz3h}do(DgRb> zm}u=*)S{8)&P$E?(d8BPM}6(Sd&)=$h4_uM=a0%0ZhWM*{>WGDHj{gK$w(?J=^1C+ zJKK-T#pv97I*J}o54xMwCT-ViY(?BRd93v7Y$blF`91?=k#vlZO033pqv295dCX)n zF9^m9N5Vov)EoIa*jTF`p96f|30r6FDo|b$WxEUbuIUP#i&&!rS`_~9WH{Zx@ zRZEvE5)W6X%nlF`GiBWzHq0=#jTEYof}awFs-6M zV9()%wARjL!79bsX`D#&8#YG^9ul*gA`Dg~cWL<>sL{?+p42x=o|UwGn<+odkmGdh zu4Rx;CeP_HV)D3|avf(-Due6Y9qTt+lVkN>^;=z1HoNO1*^7@ zDeB&{hd`m>`DckGM=kT!i>+3cy>;+dQoagfYixdf*4(9)re)K}POaI}FSSf#W9UVm zVmfLh18mkcNvUBtNAv+ut(>1 z$&eE$V^l&TYwnhIHL66N+QWn2QwHkBGf?m5d__}w$E>&Oc?v9{ft4kGG z*?q{B#a*&Mxuc2@TX7e)%UD-V*;H-I8y8n3UM$>e_y6nAOO_f~D(l@1SgJ?R> z-H?@quhQ%3S`fw8+sC5A$cMyg$a8y1&fI{*X>N zPiB3bP(R7IAA2*3Pg5`Mbmr+2Od&g|4q&Ei8+=f&v|(V^mmMkA@$+I~3&0O~*79JN zmLy?MOcF>Dh}29B$FB$n2VI%mutLIh-M;prL&qD%IZw%LZIMWHKuii1b#^-1Ng%=vmyE+jE@G@+cPq)}tT4_c)R4Y+*k zTufT5_Tg~HQ@dWt<6><$QYug5wn)!>S64E+^8{SySh|I`U;kNzxd1ALsmUYXx0%0k zaOgaK`DhU!POCGj&Vc=~h`R+Ruk`yhz~|I7bgCM&0~GZDLnzz5ixPF);50x9))o;fZU@_=ly<3+Gv!Q^u4qx>|{p z(R=vAGdSqq>}79rY_mp2mqfCv>5O8^_o-^R><2v5PnJSbHTb-AbqB4JGL+)S_$q2ImcS7u;8>qyz2woll4L6;_iuXnVi zBa|oU&q&^LF6J$hXP^5^wH)`_ba(I7A7!-<;$Eh*`nf~ zj%WBqIWkgEIdWR9^k#*>fIzrcPp@+i41gbJ>L(E9IWe=+PxOG%Fg+XspWHP~zv{B3hyQS(;s3vr)wXJ0N5F}3(nL4|i9{spE`{Pgp98%P;aS`jW?6SuisNQCroq&#&hHqN(aeIlg8g<;`aK)=^w@NlRCxkA88htO21sN`pUyNo04!Q zxBS*tf&hP9jY6buvTvNf{zdvo&p`fDp0~a|HC@}YacW_8og)JGlM~kD7(zYY*K-?T zW~z3|;B4_&2{~=PHeZSDJmn{`VDuyM?XE2xZG=(YO7Wvs1-&{V2AgSU&jMS6_9t78 z{ckhZkLzyR9CBybHoWg@dNv!}sEi#Izu6 zVx3~f&cja>*3s6kA@~&C~0e$C($atZ;3H!To_Iswtt*AuWAdPbyF}Ow292~60GwL z>N%UJi5%n##;;eN9eCEfzf=;Uv3Miub|`1Y8j=1JjOqq6R@ob)DYO=(Rnyr6Ihd4w zV-JK_9Bp25+H~=%t5C*{4o72;%Y~Pg{XS|xOFY_fz>9n|Kcwf#!rIVo<$zvzA|W8T zNd28V%cn(D=4RtSyA*E!roe>_G`~8DvZl-&pG zl&M+&S18`gX5LxEHKWAy1I6i_FAS*2FFH&J0756$a$DNNM##0Kla!xcdXMLNq@#O|{vKWXsj|n;Ev`m-{{}FCUS7y(+wDz0JgG zqDhrzJ+k3@LA>>`jxAsG!xb{?yi~uLY33GQrp52~L%BX+u^a78l|vjfB!TCmg}rYj zqvb)X?O_xh?gOE9%Z|%-H&aNin&3R5JeRg`li0eFa>V1rS-*?kR=quhdSdZF$mn4t z#lS0S9k$!m$=l}?ikHUh8O(6+hM+$iwG8r}JZY{&y`Q|5(X^C5F|%D!IP`r*NkxM2 zL+kfvc(%_Eb(vP`ry5#v{A$!jBJStJd`dM-BDidDYx~-iTZpi%8~wSeC=qD8AK9@w zdYf`^WAkE5_F5>m+3p&Hvqy1X-oj*XTLUUpq1O;yfXO`jr1AavSFNdXST*RWXOG9( zBWN}7%m>zshn|2js{9*{xVJ3K{0NrhGYP$z%P+no{eLk`66`mrvpMLiHP1T66!cUi z3^SVpg=ki(-r8_%S6`Vz(H!VuWg_Uq?b)n@K@5nCi3AdF`J8>Dc3s93DLs-`8EJWo z{X^1v470fIerq>*XxqF~y7d%4NYrKc3N({?(Vy}>>Q46b=ZZ0rm3cUJXF*9^LE>m6BKs&*RC=JEr6f0H#>qvY z)@{gjVyX~(~Z_oTD!~JSOk!}OAjCf|E-ob961+B+NNi7%pf5E^#KOXAy$;umj zY#Q3Lkpr92FWlc@%=Svf8s&)x$sXJmRxDwQkTG&{p^uqnm%^vh!^ueu4DV;j|h(`@gaA({Hd*viaZy#~~YbB&$C1Nc7Y>+36 z5(iUFrN>Ydv}sYRwMp7(Lic{kQ^r`*J=}^Vei1(N7(RDH@1;x!uV%LFy}esij4#&O 
zo;9~)f6bX~jcRUBi|O-?>L0}ou5s^Mp4z<37E=~+xlOC3u5&kG@Zm3?XJ`Yx&(=BW zb~iqy%`}DG?@}0_OE>_slBUZLR73q6U> zvc+FhrSX-3p|FAwRLHp-9|)#N*e<)?&YwSXgB zB59!>PZ*pi1@+)sr*AUoU(;->nI19!@X;|biyBQUN{7+Ss{AIsRBqdqA+DzVFaQI` z!lf|p1F>mh>*wlk*$oaKfSkzV#g+zS!5 z92Q^9+9)KSU|rTtMH&oZl?d(UCQpMW9OQmvK4Q3NZ2Fhk=876%%xH9VWGJ#`G~Qhv z4~tRl}vXa@$x8z^% z*vdT*GoO%q>M3iJ7P$YY(&)2BIWxDcQr@>}^7aJ0@uzL#Wyk$Xzv~)~_vKVd=D!sMpi|Z54(x;GT=cj zS0gM!!mlgsa+#G}i*3M2+6z9mYX~L8SBy@_x2MQmx?uPf0js-fxE#6umIsSFMcOkN z5vEyVU*}%?Fx*tN?F*4;kHmdR&nyHqg9SK?4_bXNNtK=TJtXG$vaX86vkF&EDb($S zRuti&ZK2by0RO0&_&NGo01h{Xm>Q=hr_%E>VF@NV^(qP&U+d$Y4qTXg2Uu&F7yeJ| z=}1i#-*^7g_#bMwyM)*{Qk!$*~KDmDE(mocvms$~ODBRfmM;p0(@Bj?|2Wzjevud{M7xh(T04fUN~BhgGb zTdZmOiL&!0xoh~V-6BJdj79;e#uoO;IuaC#0Kk@Z`M^d{GSo6`n)(<1%LGWf zFXo$0Kv1Rle2k&96vBq@RQcnjO2y+}G4FUEZBw%pV@^CP9Dg8UXLA4gE2(kiP!Bd5 z4eN1cn+?_N@Y#OK@-gZuR@RTAMTTtQ*{i(dLB{@{z`wbq@nUvr#Z1z*CaK$1DN%k$LVpCq5k=d0wAedh?8?-qs!W=9Be84~n+Ec$;=D~d#UjgO zn12ESUgIb>+f2;Ber-y}(b+Asu8U0K;S1GB(fzx|kHxL_g(F=%uce6g>0pu#D@Z=% z(|FPQX8(z*=Ij^_8VZGBd%yMfY7f@77_!YP_N_+&Nnf;7Uhv)G?ytO_V0!wgTZElR z#wohmGURjIyxBGGyVSWPuTtXX^ZvYbnbO!L2M+a0+4ESl_9bqtP{#jQXGwNx z9Ou_?kW5jKydyY9S~{g?H*bRb$>@AFa5hjckv`94hEoe1yEvrS0jZc%*vT?N;ezNZ z3HN-m9XI3C%^mwM<+Ux-?zA-#BccZIvwMOy7D)QWdqxK zu(U!jP&1KuUf|*Sf+0No{=kM^KCUQ;NU zTl1kmd^Vy@P4pB2Q8``=N80o+EAH66bk6juJ$-D%lxcVGW10E^rAXm^irekX&dbaK z;sQU*0SHk0l--GSx|cM1`|%v>(n)Obv<>R}lRx+RmHh#@5iO|W7@o`KHCJpq`8B?t zF>e_dQmcE-_UPwtwa0Qd-1YBfk&0ZYf~NMLlD8*9Film2$(G-KA|{3_vlJv~_BV$A z>}h3fpFdd!B%nQG1oi!U8Ff}oxWcIO;}pPNp2w+U!O<@{6N4T zG){FZf5^>SU%e`^g!*n$5VfKR&3R@yl9gQCs6p>adbod$n7+)cQ!VPWq=I#=RYa}o z!KtPToz`6`O?XS8^|r|pMcug}$MZVxR8p4EDHdJKzh05YZ0sKoFjQLzRfId-eYabA z$QDvlG_aQ>M3mW-CnZwhN8f~Z!yPBmdO=!gouum7uO~IroYL`nVfJx2em5A82YFrS zr)b%}q%q7Fn}xYA!!SjSca$CRDpb>i!uKNwv%#p_5jekdn^t#%wgImXt8=loO``6^P%&4k8Mh)5YGIKz#cEEpjGGD&rYM< ze|DGYBcBSAB-S9@`vAf_TlcDlFGzGk19%lfh#U*`!3GTV=39BXhqk13qwT6!6c8o^ z^c?rFuGwQT8&s>7{SKrRX1O72Nlh-gm=rU)^o0Nc)R5_vK|b3K5HtVtOI60W9W#-JU0PH7Vn~ zQQm(u>2lm@Ws9zClxLazatFh2FVt0gJa(-&`#6ylgMz04Lw+iEn0`E;`?=HN#6XZ0 z!sO(Vk^8&zM|l=YNX)XOH7P@X$r{7xhe|u5QNS!Re&R=RNY~z#r)z`p9)=MwTOI^G z0nQATX20V?m#zIa82+=_xPa%VQu8e=<`>1l0=I&>w9#47I31c; z56|!gw<6fPW9f~0zXp@WW<}+lHihKC=*SMAh5q3$$kZ$eAT+9F2XDLl=XM~!nI!V@ zx;fN15oU|3dLqrFEf^_GCe<^Z;$SW3;$fL-E2Lpw&F?Ajc2uUo=(1q>&*wINnv7?} zVWGmxr$e2246*|u)JO8O-!&rvfL&u}h)R#KR_zEeOTEpK>pVRHX4dHvf!n~%1$NAi z&Q${6KEQ|5YSdNdcz_Dp7Tw1iq>OD6K*5^Z+CWNXmdE{^#>D5yJ(@V&8b%!0eaC9Ss}Sg4C8 zN{oc~%d~GfoENPyhBzvlMVJF;2pjPeC8QOxX^sRtOp&~eSy+r9ON3wI_GicyA}+G= zjk~aL(LPgGUTckm(eG7mWAECffA{oXOBO4V1R=D@_okHTk5M2*dIN2Z(Pmzdi!K2T zOCvP$C6MicIN@7Nt?LB!NP_g>w-`KS@V};8S3bg)yr@}zwG-Jq+p9A3L-2LJrg#u; z(i$wo^e~(PC^C2z%YyO-D^`FRw_aN6W&B(lo_J)H=7&Q&id9u!b-0=p!fF8rj2gKH zM~rc7G^nJbfI0~|mi{bEAVO&4;yqTg1KB#o@9#?hVLTe{6K+)X+IX%0@3%mR7C()b z38j<=l@;P7xaR>(&YQ4=WoI8pf!|UJHm@2tjp{h$=)%UN9G(qcq-Oz1<89Ng*uya9 zV6qZ`l4yW!TR{AgU+47j^z{;qAbYGZ;dNGf4E&EM#Hh=6B1fGC;ktJV#@sZlS-kuT z>pC+!CIZ?ZIvX3W*O4*cB$>{OKN@VK)}bwjhM`aC0x%}nXsveN@Y^J~c*Q(Qc9mJH zKl5b4v^)cM+T~A|Xk5hn30|BYE-X(#m2=ZrJzwY6Zf)UqB^*!)(`2>T9bVL|V@MPQ z!B}oB6c25^%A%aWDT^7cux|%S(nFBpAY}Pi(sZyG@2PgSuUbfUC%NM+7X$3R+jo*d zt}r~h{4+fe0B70d!NTYbL5>+@ zE{}C!ghM2#00jx2fHavcr^1TIq9X#z?_tr#GJ<+69*+JAYZGp-rsVlG5b_sMRJcIj zV-Ea(FTZvIN8{yW{}qn4xXOWX=LwIS2_rxvnbZqg5hx&LUG|1A;+5TGoO*p$^>8Cv z;9DM@*}%k#ElALD;im{^JYRWQZqJGMtnkh&QLTwD*4HJ!_ku|-Z-CsK>n>;miL9?k z8c}76*%AM^i)80V_U-2dpoGa-tG?0;_vyFdRJ?B4V@=ncpR)a4WoZHzlZhQf#YI7Y z@UdWOkw`$VTNZAV>Isk=AHquK+K$;jNkPUl@d61J{D;%m>Y+;w1wg>LdoeX>Uu_{i zH70wrh%W&{5zTrC!tb##v+1wFJs_jWTHl=9htClJ54+x##jwn4E+e1V| 
zi+l>50s~KB*LD3C#I{3?fk2)a#4|A>9)Bu-?IEzf$DL?b_x4Q~kEI54I+=4n^VB3t z?l`h|fBqF|6d{Jfe&;>q*!tok|w5mJl>Wquc;UV69-n5JVjfyVb`epX%>CyUq(4F6-v1s(e z@pbnr2fuOs-&q9v;z3R$4iCP)eB&EZLjlbsb8a!vxyBGx~)YceBi%}*&N)jyZ!JuCo5UaR7~PxVK`U) zv6`&~S&YH&6|@R^@d||Lr+WQ6&I%A-Dvvjsh;XiToz{wiQaE%*8Y}P1pNH!pSzerkcwhE#5#{Mz5gI9Wmpeb?=yGs$YBz;jl9a_9|s=?8;wr`I!`~ZyuHE7+~;f%PA~GXHA>p{@dKvK(t2GLXh8ne z3>4kRK?o!7gguO43L~=&e4GLYu3Z@w-$xV$6*yPP<6-zVYc|QH?Kg}yn-IlgRkm|K zuh`e0SkzBkue`vIoC>GxW@l|1kgsZEek{Y%@Z(R z?{l5%HfUhHI*1k)tpV=gbEV4WDHrUVCf8t;aIZV`6t9SO;rV@u8`spy{y}3h?jW^T z?)u<+EzbU3Y#r34TaV^zuS`7`5CQ$T_kXnL$g@&O8WS=2?FDP%?T4?TDFS!hPfX9L zo=avLK{fin#dt?(%`N$PV=^9QAHVe_L06|g5N=rx6oj_grAPTA46<1GRaOy!&Aixn z4z2sn3fNPPP3e5w56`9OMvKhwcwsTm(l$y~&-+Jk%yRtsHvwY^ixB2uZ5q`NXf+Iy0Eb@KZ%=vIaKhMWadlpyB zH4MxJf`S;0q2ILofaojb$VH;d#{(v)a|$L`#_u2eYNF#vIPBO+kgY6Y2Xj7alOnIC zafM_}fYqoZUP~GZ*v^~cViI#cpEL{{6CQ!tC9{|31NZ`nXbb9|Gy6@_oO>eaE_&#m z;=s%*j^DGdx(wI{%H=CkMiFK0aAVEXq7c$~=aLcQ1M4g*(oTj+*O(ms)g@1)c5 z!ILAWm%Hv95p*$x`^<0!{%WN}KKB;_eP#I^m#vykDuER~o_KdG0Ad^Bn)GU_b>W9R zT|BaM`FZic{C|qJz*&z2{n)6g7_YC3IptSxP;usY=`;lJhu^up2K!bmS-Lq;8=uUSpsKk>S7Q08 zE}P-1kkn1D1;XEMxmy+=zG}+im7(L0-1cC6m$ZFY980IjWX!1gwrj^vlGdRvZr`%D zXp%ryJLH-rse!}z3?(5Er*-3cOFfALCZysG2nzG==`bHsMq*dO31phT{ncfZIA27f z#oL6LG%tI@aWf|9%IIfUHlm`>v|8dghj#-AuPyf}3Qxx2m~T3~j@9(~?#DD{!C*m+ zjS;oWV_iq@g~{ylyutk&Z&9gM=ubnT1FjnPqNsJY)h?Un=+q1>{uP5Go^W_CF46gy zwYyWl*t&H68L`nDH@6?H?Hn%3tN7}eyLr1h)iDaPvz6##F>6zAGGIh@Xuf~u!4agf zA^fGCTQXLceBQ-hIB3h61pB(+iV#f| ziC8M0LX79|G)_tXO1U+4zUPIYM6hvXy?!U9>3n$V&VasstNVKGLLYA<=Evz+phKRd zH__&w_{f0xl~68o5oYHw?A?Gj!Ey^k@98;jYccdCZBGdk7pH)_pHOR%XCqd?Q_`|D!M$+%a=5^sOARQ4rt~dCx3O%i^1bSZM9QS zRslm*{>y_FDy^ZNmmFVlW`A#*Z@h27E8m?W!}$~`Ov=^Hni)cU$HLSCI{VM0HE2hH zr(WfHe3jJic3$esjEuXoSbwnS&(o-U@SX`zoqeoH!#<)jJa91?N^^DgObb@Bd2IGl z7b~B)M8ZiCEAk1@9v9>yal_esPIG~ib?L2=*Su07vl~fFL?P~>Nh!VkyF200u>``m z9F(A{(WdV7mr$Mh3 z_%|FANb1PGyIHVVfNv&&RpKWcQ^f7T-K!7t&vnGO zo!FS??Bkk2QGksMX=-$~8{&Z?gZ8hG+o~Qp+R#DdP2th2ehC9l*fpO$VnCR9fLbsO z#%x@RF6;gvaG`HNZRntbRvaf-LY25>=eu}RKNQGAk2DruOZdmrO9l*y$}ZV{GJSU^ z(7(_(s($+lP+qv6Lb)CGwqtN~o7fvZ{f0vmo;Ypt0xlS!7L|pMGUIrq?$KRuNrIF;?n_n&E!m$Jw_g*!|&sRH~87SpRJ?L5~*QPbpkGX<8{i$;e|#HHRRD zzjDK1h-l6$RfWwL?9tfd77T9Mn&UOKdxtxr3|RWxWXFBkNT)7ntcV82MZ{>Q9jTLyb`Uxb)L{KhROEQ?CRgo_OXjSt`6d@$`QY9b^Y{Y7D2 zpi@UXZ95>Hoy&X&>#XR@MSJyLQnK-;4-0wx3#bYNce{s_Joa6JrX)dVq~yUL_#D4T z=zSO!N?~5Q)Riw0Y$%QIpE|q#t|+BH`s-bhyvi6I@m4a?-s(A|%m(#xp`fI&xByuP zvYf8?rKuV0xy%-85p-n97+OtNh?Ou*E@4D#3YXZ~2PC+(6IZEk^vs|Y%F4qhI=#-0 zj8fP~r04vDU!v8J8-Xf#^$lZXy-l;nOc~11L5UZ%9TP?Tq*F>Nj%8e5i8L1crv3!1 z+``1uwx~6Vr-?$AVw?p7!tJZa@U&{XLIHp&5F|NnXiY!V)T9&m#=VKSS^T|=wY09b z(?w!D`Y@ci>FxQNX@qL^69F)vc5}(fV%(DaLixtHQ}GkHp2e;XFE^YQ+nDxlQH2%_ zi(&Ts%h|CH;tmZ>SGJnzV#E`k6Ue$AfI-r*=wOv^KB-TeK={4AcPD1oFf8mIb;kM? z@iKJ|%y+*lS_kMxbJ4dS;nWKUq6#tyycn6fqIHelZy3ME{D@u1qb=0129V(Eg(}}b z)0i9OZ!%=D$r^Venqa zJ|+2|CvifriK^uD$u>(<{mdBOQb=?Hwn$;$wxPxkD*OgPtN04)O~-&I)sLxur+MrU zC8E^Oj6NCHjbTn4o85H9`=-zz+Shu_fAdSc{GbqLdUho_2D%8rEb-U(^scNOZTkZW*W%leD@rZ|D`CPhy6>!@D)Mg-@iJC z8HLc5$HV}nQD-2Da;WABqSkrlFC>uL=d$%do=v4L4Cln}%cj@az84$);RPb$R7bK* zbZlE8eWhD)Bbpr(0S|9P;s%R8q29*o7QPad&ggYvM52sdqu`cP^g;hEvn+=sS+99Q zE==s=8iy(0wsybmLAFV;7hhe&D9D-On_USDsTYx-BLbi};)!KZ+*Crj

literal 0
HcmV?d00001

diff --git a/v0.20.3/img/tuning_plot.png b/v0.20.3/img/tuning_plot.png
new file mode 100644
index 0000000000000000000000000000000000000000..5da6fce350ea68fe052bcbc6c8fa76b6a6569c39
GIT binary patch
literal 45949
ziqqqGyFH55$ZBA;^W5@seT$2ku3gm}g^L;Pt9n?mU8~;1ZBwaF@YRbKy(~`}pzWn$`Tu3LQ@1isNa4UQLDV%;icS` zBNa6@H6l(cZuugDz#t1zN525 zs!vUQ=}9!O>L?-6IzBNXp5Etv^mlW4Irj7U+=~ro=h>H{k0|}jDl3ov4pffm5&L1q z$;RXb3b$WT@U}H9Fk=4Pr1W)1MtH4Vkjceu^e0OHJ=Fu|-5ui%LqAOYES5K>bfj zN+L4DqS#*^AL{;u6OF+njV+w3^Q^NZ?TV)g28A=(IkpS8d-sJzd}DnVp+!Gg|bcr>79W(az3J0;hE(ZZ5R3&!0aZ zA0Mx+Ss)`Lb8&IeM4fE^X^NnfI6XV#nxu(p_rP^ohYqqeUB~wH&iQ8L%);`+>*ARF z=p~N|;lqd71qJn%BL%$co12^XjM{#7#t!Q0F$>DQhno{J!yXr><+-`itd8HleqDfm z{{6FxR_6NX7`J4HSIxme9p?1!c*=R* zb2xrF9u7@Tp6{;?EG{mVYryNLqM;df)Phy;HsIyjB7;`RucoG^A3t99f0ZKPb)2=Y z?nxHJ#Kh!cViFxdyk7|nN!#N1l zvy2qLnNYF;hB7mUwHp%>68>C-f{Td%YCa<0Z^lkZNl6%T`v3+{NabanD<37L(XS8K z;^N}4^xE6oQBz?ocII1~>XPAtFZ^;A7M7L{VFglCznc3M2q#5G-@A7YpGh}d7TP($ zN5Dm6-<|S8L%X4k4i_32utlV0W@d(jgcuVN!3il!&~t@FZ zztdocUs|JQMgnLUvpNi0-`m|C92&|XxpNIB;De#xCPKZNsI<*iXI}p4j}9R1Z&2Mn*B|ft#M5;9s<~_ zsHm`=t`$br($Qgy>txiaP*qo-uCk+~ph$&R#!U-NqgNd6{KkzNVq#*%_n>LmE&h%M z5TLB4cCfeS@cQ+`Yc%=a#Zu6*I#qMzi-JiN29tHP#$_u@A$e_O<@oIE2_4-m0p|5A z>syc!#uh2~cPNJeluEIN!OqRggPx#}{B8{8-_xz?%1cU0*0?urz}rKx{2c5c;B(H0 zwRjCWT#x=vL9v1biBbyJL^PtRwC`+RAK9h$P&1Hk-_@*h=eU;xzbg~&YH$;dWzq`{ z3F-Or>`C+W5*RPSn__<7^aqIDi!o(7TpB8>U%!48Y3ASN%!9%uFVFPkiKWyvFWSwC zqZ~#82&5!(`#!qd}JLPCO#jg5gBn#^un1Z9pYy@*-o=cmXh_s)wK0IhZ< zaBcmLesXbo(1Z(MQOI)O9NThqae0}Dh)9Ms;qzwz>gsB0j~_oCnx*Nq`MIC5r&FM+QG}xZ| zEZhid&C|0dedhitF|pRD^&W#9a4z^PJ{}&x_R?I&o@@+Q?>ZJgHX8!Xb`1Bva}!A* zS%Id~tTxaQTJ_F}N3kZub*n}-Y9Fh1FikJLTUA|cJzDf9OJQ|&)$?RmZ1;F?x##Zf z=dg+nZ(9!M_YDl_6dT?WS%F1CE1#IsX9nC{kTc`!S9X`blgR=@&`1@Pl#2aB7M7Rc ziFEb!>U1h?9C_=g0f|!aCNbGkzi&6j0luH(VJxVus7M|G&xeP2TXbYE;Bg!t5s_M0 zX#4WzkNKVcfq^IV^xvJv6iiokpu8nULqZBZT)Kd zjQUJi=!FIiq0|m4YO1Pb3yUKKdMcd7ViQ5V6r{YrC~x@_>5LDaSI4J*|E^7rRYn!* zxN~w2y*^FlT#)g+ZOvh|KjVil%6uOy0x^(!0&@pupFP|q$NSyrPoIFB8GK#t0E7g85NlJpsjH zJ9xyR*Q|{8PH~oe(ns%e)d^ke$pD?+<{OB&yhZmS|Bjch1F9Rda+a6B7uC-G^l60b z3uFsLx=)SQhfta?G?zpR?XefEaR7R`#h}^rR&cO;q@<m zWG(!hdKpF-7B?5bPzGtH1>$G!`_R>aCR09oq~G90f{#xdl?dl**9Ay-w}z2fD%^a1 zaq8r!C@J~D_})9Eba4Vg!py9!`TqzFy^8IBc3jJ|o_p=DM1TMW&rcOZh11^FmVFop z!-Yt>fe}s2##RayUz&8!wH6=^2uZ{BM$)iQEfaSMXafTSyQkjt;Xb5~=Pg}ghBYKg z3+4Z$U$LEl$KF_3xj&J|0j4|wG4VO{HEc@pL4F+&ax^u~%*_M6+tkUQJn1liK!ZR{h1a9-nI&~;49cWpj;ex! 
z!X#V8ZIpXt-`24GcrhhVDV&hI{U|GADw!WY?i(7S<|PFB79`Ge>lx@huU~J&3+w9Y zdb7WxbY>GsN=gcZr@xNKas7bznT#>Z#u;mLL5gi$bo4E1xC0AzfwQ+k*GEU#dR=Yp z>$U-HC;OuMgk9ZP!tE{A!g!VK?(y;XUTD7L2JtL^LNpne=;@&;musx8t z`dZi|H-Kd-9{doSgyflVTQ~?PaR~_{^<;Fxno-=&=dN?D(&{TlqK_P~MW(zk6WJG3 zew>X*4cuM&oqy{+Mu7FgRVT{y|*OS z#gGki5*`QU!!TDd23-CHU2jCV!J81xMl~q@^ne@MC}gR#>hGWmg}l9Skrr!hsC6JJ zw)!C_VDtgnJB!bra0Ls?pBu<2H(*U0G%Rk#u78gluSxyg^Of0c;@(E4BAw_m%wV2( z9#ZVjP&$4LCqZNfYvtu{P9t!7+!$lHP)Q5(sQ@CghmAQ*|QqM%>cXk zmZADOECk|w4sH`@5fE>n@Pq^ep2yoDV^TbO29s!VYKmmb<6HAskW&7%L-^Neaw=yc zH7|$fCMGn*I>?B(grB{9w&__h^wrW|^S8lA@U;(NoR$n=rN7n`NtBcZ%1 zDM!O8M6x~44j(*taI`feff^kf3w^s5+UWHa%-F}H{6_028WfazwCBjB zYh5hQtBU_So6V2k#>S3(eDDOc&Wx-qE~`-%Z*OlGmm`qDfB%;L?4EMlU*rz7ue*1t zcu`@XAA?x;)Tjk&q=7yECg^Sex)$uKp_chrNOx_Kj*hO(c1C|!Q%%ijAXCm& z+@kp=k}ya(HO`x5<>i6hFy8iAckt-}O#$i=6Bh@nGl!pj@R8e1T4rXn>rIgqN)723 z0qEubDQ_@2hy^@Gq5OAsSs!oz0ci>~6<96X!{Cn}(fH@f2^GHZ#}$*8c{s~9=Z+*6 zg(;}joL~~kUF-Yd%s}LzK|`P9w3)1eW|)(gC!-ED{mJ9UQ}v!iGU%W$*v!<|rKP1M zBuoJ0_L_VFp8^%dk#`ChFrdZJnYW3@%L{Q9J#KBiq!fN_xT#ma^J~K*UwXk5r-JDR zU?1Zl%L}0U(EK=5)Rc2nEiEmj!m?ft4GjUmo35}X3Tj@PX}DTwkMf>0`SI@7$;k;n z=@v|Rkl5VM4y~i6Z~xgv`}m76U?x_I%7X9W=TDJ29O#RaU_b2c?ZKU0&btSr3>WBD z8=9GMLxp9HBgVnmJwMqah#iB2s`u|li5q;0i!-;iWp>NY&$lu(+%>X9O$9)&k|i&f#5*)R zTv5;W6X+Ve6nN#SiXW(}J3GH{sH|Ujwp8DuW5lCQQkh<@VQ>VljrLz0KuAS`g~ejj z5)7dJ?D#lblFAS3eN!Blt-hF8bE6M3k*AxMR$OHN(PL(yr{26vo6xhM?Z`+=6SBMz z@^rY=P*uHo`?fOOrzaW(k}l$Xxw-W1I+mn3fuBBO)i|AX`~8yJWFO43(O%u!>M<;i zpCbR~C^6Q~cLRkow3()flDhCbyC_TknDkFkvsYWyIgS_t-ZEJK3ht%uJ$UG4gY%tk@W+qr?MGX_2j_yO#AICLd zlv(okX=IP>h&VXP+u9@r1qB(36GR#CWI{JM5=u&TXC{7rjeakZT2{2ssa3XcT>WIo z#z&GtfZc8>_)dDoWp(}7Hz{cVI#$k5cXz`!vH(YsM&CueWg*o}SEK_71G5p=YXA^H$M4_j z1-d8jBUo@0e)q@(7~KbF^W2h)Ar|srh7N+jz?r`4)U22rWk!cwwJWv+%7s!6S!31nT{0CQT zfUZzi@@HFv{T7J=^E)CUB7_sEXm5XCH1$b@>Fxef5!BS5>^lh)z-zB`!73=z1&jxx z5guvP(Qf=Yq9hJg?wWoPwhoSycA!U>)dxIMkSy$`u;62E+z zhIa>V!#L_%0!Z*V_?`%)_zR!QUwG?K3me=IasfCQ7ZJb`f2(x=wp46vtb~{txGTj) zMZGO8nf?yN@T=n?c71!%M}6>-nF`4^W@c5!?KtS@K`^%)JkQjHg@tdQz-eU!7$KS+ zU0v5O4iqgg=YX+elJK${w_#Pszq0AL)-&D9YTAZ%BhcE|V zCMhlbC_+;2yQ9w;Fay*P#rEQo>myPNms~FbpII&*Pxfiqb4dnzSxiCu*S#q%TUpl_ z7F^W@Mt)y?xUXH}@k&dJp!g*MQSv@#;cHfwsfo#Dm4JZ2C2;Bf(3@Xp!9FsSFfcx! zd8wd+Z@fOg8{Z0;>|YjOsF8<r)2N--q|Sr#T!o`bR?bzH6wdVSkpGgX#t= zrKh8V5;|(bO>+|Y4Vfy4y<-UB6Llh4`i zGWO%Aax4*IqMn{B+3AL>#xybXz>lih+G~8SuG)Vka`jR4r-Ou%&mV=7`k^P@XjFSn zodq=vq#V@L{e4HUWE|Mc{lXRLWEB*ktor>y-pv7wpXQ{~Tlyq>{h{A4vl+?M&oj@g z2$kHx3~gqyo2Bzk*85~Mb>!8bBqn-G#B6SVj5(TjSG|=ShhQf+9W!LS_MmVvH3-Zo zp^1^sns@YcbRbmCR69sOLI3XaR+Z)9-C{{Ub?Veq%f4Qxm9Q+s;t!2?C;?UYy%H-Q z3IC1O8G`ZQHzh8bZL{t?hOV!qu^(326ZI1&!3S>s+5Cv}M}%vOD{E`h(5NZO%m-+0 z`#$x)^w7#cVsyH?^t6b5i6nO59UIMIUf@f{@A|^jR6>#zL~RwluCjF^|N7JqvchBr z^etO=bV=nTANpa%c034b{?Kw<|1n4X1|=b)ks#B7M<$l3nGYqLi8o0eZ^xpL0=??y zp}izk|AEZ1cyRK*eY;Kk$J_oLyhWz(gM!eN+`v1K8`m+(m=+>IksR(-W^&(O5eLfc zg{}D>bNl3+QX=$MON)uADZ0Oij?M&Bw8Qlg%#qhAWBkvaNj(`R-=dZrz@_jLSpJ}5 zd$3j>&?wo`(h|7iBH(>_zB$2#j*k9QVemcK_(bIuxy_LjlAfh_H}YaZbg;0@TCPBH_J~+^;up*c~5!!PEonWHHN&4kq`c`DM&=8Hueuw3X zuBD|V(Pa@RkN}naMu_|rD{ZEx>s>3V?t4CDektcYkzZN3zrPR0vk=jHAU!Ty(@@8g zxa|xAS`z%9I-ezL|Ho*Ii&hZ~_POMx2q6e|8UbFaoX! 
z0T!Bs^wt0sl;Zi85a3R?Zr>ixS5j6!g)B+|Of;YxbUeGOP&Sa?hj~Sv5>hBj&}L=t zWl<0KK)qE0vD&CuQ}9xpCPETKSOz@w_o^uwqgE9mDzO^c&-{t{{nuwQvz?2b*E$#N z1G{e_jxzw^Ky!o+;FL)#rJ&$_xDgi;f+6rjD9CE+dvyBLja&H=^t_U4_#va6>TJ{1 z4z$;hrGZ@*Wf-iL36RXWxGLk=Ovgt@i^jk(TzK~8aAkcRXyo0k4=Ds+_}w0Hy5!Y~ zTP+=eBbK%c1@ot-J@g^m>!L@@|K@NbJ{sB?gdW1dov|4c+5@?abGzr%HVkc3#-*i=+h1U-(0GL*rBm6er!Eq7bn5D+5YJ?@W%`NI085t_j^S90|7 z$!yCw|8BL6rjtsJCW_B_19i`}0u5$QtbFprXXb>HdZ82E0BYDG40Ljg1W&xp?;1zX^@vC1J6; zp5Er_Di0{!(7@m(k&%6&TizpNH~S;<)5aO82qZw5P$niOingHiA|NCI2n7rosKKE5 zR4sG_HT}s4(g}hU06}Q)=-m{;L`tfvj$oPZ?(RZ4h2uU6$^(3S+p&@ZYw=78uF6~J zc#WD55!(Sdu@39Q-(c2+JbaqA)XM7nW3$5zqb+WYwoha(Y|};A@ zeEv1Jk$3XXc)J|;bBfENuKWk>Po>O(B~Y{UrzrXSEHLU@A}*uC^It3~|8{=6Nz4Q> ziw>EZ^73a;hrqkoj0VOAvUq7_Wh?DNGZmQ9V*TvmOIIYm8U0!|4F+`&WK_R^+49>q zS*g)oG?Uyf{2eRd_rFf=c)%i$#en;>!3zc?SzT2HVHJoC*;P-%9guLpZX6xuCcpMi zQZP~!=?o*bufE{~q1O8QNK%Oy`mL19Pt#eDN4))4C;`esD+Xc8bEz{f2#+x`pCUrC zzP=ukC8jJ7F&gj?e&N2p$T|6_uLQ;Mrm#gm^eB1RPkI~CDY=0#hdaccJ2NSAul}mQ zoz#5j$Gd+QEz*yjUtK)`PqMSK6O&8;%wk9kwO{1_GJG0yql%x@cA~->e$LLtWdKp2 zusvm{{>z}FgG|rG${NpMiAP*$X$1MIZZTrahtH744i?!s%xrD$?i>&bcyRkfbdw6Q zyDn%(EXx|dc;9bi&Rn4Dy%*UCr5}wYsUrd=7!)cXxd;3Eg98Il+7nn_ia0v5$BY0{ zUjbbJIx~Jy^EC}d^LOIBo*y|ms{ocj{RII!Ci<4d&;%hSW>`*?7i~ssn#7;srIwE8 zOZO96XJ!QFoqlU}&2_J~n$~2;dbIiQBqADh=IavSr2(S)D4hY`mu7#d+tuYo$*Yd; zFVAGM27pNpo1ud#3UyIVUcREZcxQLFq^Jm_ot>kj!n&>`J{Ky?+jk$nhQ9)_9c<=G ztu55HoIge_Gm?_H6lnb{a`J9{)o*$_D+@o=Y2^6(i~SO8g@zAY@lNoK#=`UnefbwJ zUO+0z1g4fz%iSkW@}LX>Xd9oHF!=6h4k#B65Y$5cQZh1zv>QwR%?E`T#12SVsL!+@ z`OJc73-m?KlUC@<16M-@| zSJgATHY-jYF|oxgooj`8vfAO!ojV7fL#z*jXS}cKz-9&M<$$B#O4+uEI$JfDIEJ{r z!Ox_(zh6W;{>nBwm&x~s*`JwOYGy(A4Ogcyb2B4MRrTO~UJ&cYoTxkiXUWps&-{vv zpv5531gj1r3AraR5t9`u1f>!{$znf!Qq|Cq!2`P(Y&7r54S2iW7Y8G7qeLr$N;#?! zr2x*Amy^T6ahp6mHWq(X1&_XPs79`u-y_BLqOcJrxuHg>k~^_BmhYijT4UWxdtClL zm+0b+S^Pf5c5Kv}khnBa+N~mJaIm(r%K!EaRFOa|zy4T*A0OxHmgth)70SS6yHF{gp)yN4oG&ZZqLk#mMltkBT4O@~3=e33W@x zh20|ILz>V(pc4|T2@Vgh>vqaQv9a+Upn;%|A1yey4-YxHxNIG{U^ob$8vYW5q#)FI z7-1QA9`Me<+=UDdzla(5;QXY8CdzAWdrRN+>%Lzvkn~t)1Nyk}*aHkPg`P*wJQg!* zRzrK!>0f7OUx@4$X)=J*Gd{FtO*F0~^%@NY;lZJ751asajQ*Q0kjgEXArJrzuFn17 zcUPC3jLfym9Y--ZGt&SKAhH!|QlV!z9u0??5fr>%iP8%z?};y{EG1oBEX{Ioq9ve7 zi|Nhl*a2BQby3qV@%5$rSEAWf?eTz!gM1WVVEfX?DHID_D-Dg2aZ59H?XJfJRV-=N zrRC*nxWygG1|m$)p|&1~t^V+NDdB?s(0r4uBi5RriExJtIH*=wG?RZpgGcDU3J2^5 zZW#{NAf#~?`e=8KU8}29O70!L2xUGm^|_z7_Uh{D?gX>HF)1oZn+s@&`GmXVQdBYJ z&Ll$mmk>p0BETuMoD(?3I>aA9V-rO=83A?-h8wNyXCW{SNd&#l7dv8WoH-f{=H})i zkN(OT8XAg;`RA?I#al+*sZY3G~nR@88Ep zM~_WRD8#epj8&?F;5k}2-GLn_3V|^27eLd4dI5CQyj`y8D;->(is~zMtiCjQY{%w6 zR*yeAeqv%G-lkUTAJaD7qb8b0=ou4(G&~91LpxsTcK9z(j?F4OM6J8x=>h#(ao z)qza(nZt4z3P50p5>d4DW`9P|LVo18Z<~I1?&C3N3cdIOcP1^s038m}TRpr9vK;PwccZ&zmwH$kPZMt-w!LOnVy-Mi=Ex$?C{$=q?bpM zhmfama?)$?dL{xI-b8j73wHYYqzPTaeSH)xpw847)Wt%w4^nwA&CE3S${!FC@*4g^HB^zh&Mc1> z`)*@f65nfWZ3P<&lr<1V#6(50-dL7{01Yj`(%c;C&{VCP0}w74LhAosa{<-=CqA?O zpuUlzVcNHEa;jg(>mjeaIzS7EEX|iZRS_a-ZHC3XE`RfLa-KP|2QpZrQS4Vk^Gyd%r@%Sn_->I?5XVay(J zv{Y>DMANbicu|sHO;Gak^S`PM3{?|a7&_gEyYk=9h}8O_sFX6W+GTT}J>Q>9%M&si z08mlxJ|sVgx#I}RV^I+c{lVFlOX8)wb*3kQ*`{|I@21gt*mH29j$h@<(c!AAs+yUZ z4OR++t|2cU`Y(ZP;W^PsI^o$G&Lua_Z17>c=YgNKpqZgzf1?S&IEb9dS!jtc;MvYp zTqsS>Gkq$cK{KHe6+tZ)+fbeEU{GplDGC7xyUQ%5V-XT=tXlftQz3ytjs@y67Rf@5 ziMKIEjrWxolxs)FvOyi1@XE@{urLL{Tb9GK`y)|WXT)vz(-oGAR&9|lfOW51Z{eqbT_yhgsyW~S9xFU`0=NBd>F;P*a4$IwOq+OKd z&>YxOnc?EjiZe2_U1KNY1W?04ieoRg^XnbwuvEd(ziV>4S8|4rU6!>JJw1U*;VNv= z`)jTAQ%+7Bi&87j1ooz;dAGo*Pw-XQIyc}eE8qNyqA_|Y5cd`oX{wo9QvK!7;#Xl2{vWAJC)(PGOUpKRCe@yZ8JzBpWU zhWs|}(y9HlqBx8H2tNxAT6AYum*r_2R5CX=H^C`oDjbe<&u32kS%nW=J(g(%BnNRU 
zxQcv`q=Z$Jo-i`Xt<~5JLg)G-_$KciZ%!odGtb>!SQ#ZfijY!^09!LU`T!yof!*(Z z8QLlKvQsD|b*O(cRPyyTmGZ;~RrH!=x|cHAZU4&2rSN|Az}gQ>SlHX^Cn?xalZGD=Af$ET77&NG1 z6pV%NFXh|-cAOuU2$DZE)|m6Tn8Xf1xLr0kDE4<3WdsZw3-CZV?_Mr(JN{>d5)mg`5(Q~C zzicPqem$Eh*ffeH=JyN9BB!CjTc(>@`qlB-2MOj&N{C?_-Zu341!1-8C8Ud53g z)BOE!k-oA0oSr_P(GlToGb#E=MLmBM+#|}fyH4Tl3jocdR&Zy)Ep}?&jeg9me|&PF zQ}0ndI00^WX?3+5B>pnxlOXM>c7~$T8cITRf#LU_Bw?hc`pBeI;7(^v1lkP!^tAo9 zmEVTZTZ0$|>3dZ#HjvxVUp>EGz_I_fOg$!GrZfWauP`z>f2Q`}B#OlneXp1AX?OR*gdQ^h`J2QECnwbeVxd> z=`T|K`b32JNS}(ST_pASW|phrw5M01N?0ghiyuZKLYIts-^=P1lEXs_{o>-r{&?2x z)SDr{O^y_6f64L?^DL&f3>D#laudL)E4WaZSCL!6HYQSfq zBM*vx*~z7ro{j$NTwz5eW_IfHj*;`mc;9%|BBIL#+RJt#KH}VBVNUm6!6mnXRu$dX zOiOD~-%d3Y*mP9IG7T&)fIyOE&SERO+LVnm!@Xd3#oltxE3%S-EQk`;A136Auh0kY!6uh03xre$P`!5cclK z-a;rX%qc4-8si4i{G}C34E^O~!-rVAkB`sgwrdQ;C_6hFG-Pl)U|@DbxZA%P7zTKsrORk{h$Lv+L`Yrlu|cVqvR8fWbYX ziGkZPp_;++&#)K-#HQ=E+EBUsZ+(aYufDTCwHl~~vbuxvo(fIzB? zqhlhM?Uw~VLom-EacgI{1Lk(1JPP6l&=wecq9*6+@888GBpSRiN@BJmg~@dS3Cy|4GjqV+1VdtX|67gX4KUM;|h@8au!BM z6W@W-1nE5Tn;Hkm)#9m&ki32d9Uk5}yypQkc5ttuqeFwkCc!`$mi>-I7RqA*u@Zz& zNHAUJomUtTZT{+<9*DQ?AU!XuDnYF__Z87ZmNzpG8EN&}p% z2DMhV61r%3u^X`gA8z9i68>JaH|>g#PD%=C_3WK^3*y4Gz(sNyS<~3(y;mlkUuw)7 zP;T8PL7PQ|Evv3~FfcqpBLR2|0c*(K=EQ&!fRiN8v?1tP^?kzF?r7&j&PPXv(WuL6 z_svtFm7wHrBQ>EQ8au)%p05#=Z_Un<&$H}4UhLva?c3h!U=MgF{2quF*vWGuH;9FS zATmR{hmhXNSSf9v@PY>IoAsJPgFAXxHNy1CO^ls>K{4Yj*IT{xW0>^l4|FCgp0Z`i zsmbz7yA%72K3GxV+#B7xL_U3vpM~KI8*KHVo&KS@g#p%hP=iyQWw&|D?xFUZ3lq1o zO9wMD$fJhNHWDISA<+N{!x>oJP^JUBA2omxu+RM$ukZ2O;1`TMknva>%7ftmZuqSZ z*3-`GN4Rg3e|L9-QUayc0dnOa#I!GLp*%3@{M?tLLW@*-LkLxtnikS}6BB%}BSIt< z_Mr@W{cdVPmbq4XprZh7Xt+3>P@w$`C>2t*kP-#{54pXT&!h;qLl6NbCw0}-hG0Cw zy~saQfe4>pB(j$vlz@y zOe6;58cxL|=6(&XwA>UrV%G%tvuc*5Ol2}bhNcWUem_L#`}CnA}KN|JRFON=R0AMW0zpdAbpYLr9Fp|2i zLzuSV5M(ho&-wb*533)5`0#MO+n!0O3kst6FSyJgoW?i9J;PENVL-e24)#6NdYnXl zFiXLP?2b;oiRVEHZNA-mO&^Ke=Vbo*n$b~F&m9XYuJ?a}BhwUsp;KlS5f~`NcBllg z%9k%G-15JE{hFDnuBiCJ%1XD?2gF>6mIXB{LzV<^DWG`MuVPW78K1;10 zzb89G4$=%5HLwQ;E>*B67WV2_ye30Iog_`vx<}OE->HeFrgR90zyUfg2vP@A;@S*& z;9P^*vg69X>O}(7;^f3j;%YG>dBQyC@;RXU*S{$>wy@higDVh0S3dHgP1Q#TG6{%Y%DC+R~2RB6{6#xr*0UcH0e=k3P--MMDQo>O^8 zI|#fNr4j$sOK(V0`(fpti%Qq^l`cO@b7;qoklbHhVw`Z^puakmni({lXt@d@T2@pPeisR-A$qhl0Cx#g2*5KlGyC;|B2!vg3ZONp zcn97tg!O;-Xs{tt4(uHrwTEY5zr%mB0J_|krxhW3u1OM+zYHyQt6v2zO^l9WCCP;5 zPgI$A_HvVCT7 zbvZ3h2k+Ebt@;w@{uF6DT~j3#W589Cq0c2V6$s%yu@GB%_gHu!mdSv6mH&p&2iWxk z9ZiZ90DO!{>L3H>D(W%h>CD;30o{R6h5zss~>iFCg^21;kZwju(GeIsF{uK9%p zVx5}LsI~g%kJ8>Ktj&Re(mv;L zOZ!8H``W8^Sf`f~S>X{ryylwt{Y6Z*X{SGrXm*B*ps$qjNRkd~d8Ol?|* zx}c1~VubbprNr6AWn^HWqr3ZZhdm3r4WtNlaPixU;VTfjbWTwwJaZ-6m(FA6YcNUQ zJah@lE>4G${cW+9(YzJ@<)9(YK3PkL>rW$P=DPjQrY5-IDi{uCz#6CfIWQ$)K6G?| zZ6Vo0IUN1j@kw34ZM)UHsKZ-mhrtEJgELQdf8L6_g`Cm?6<3g>1r+r@XV{Ag4UgIr zA5!+v`sDHfEsG=q(y>Eo5~BmVwg=104Y(Wgk1 z-!6=*cO#yz=<(Ati?`!PGciwXOVAz08qr4s0Kh~=MMX!?(=xKQCLtgg14$WDfDD>N zDeXCSoQ-iF&!e{ZJkyE;13`rBvui9Alak6=`d}x6i;r(Oue4F6 zk8U^C2l*D7AlKAokZ;Ul>jwAJ&Ga_WfO#P`j=reT|-YyBj$`VX!Aj9Csu{o_Nn3w{zgpC0p zN<%a%a*(Q6GkCq4tagGns{QV5+5bBYpk9b}O5= zC_m%jvNO_U!^T-Lu?!Px-ye<2e^@+U0PKXo$2W(-Nf-fxTGzhX6IA1-nYab(rm^#7 zUV@EvSSdGr1L&Vd?>p_Qeo|wQW8^HDQG_V3uO4zWY)3Wsxe@a?2|O;4kS*|_HGbS28Wx`$+%lvWCB$?=_U4Hu*dPc zK{nzXa0m_#&c~o2h(}-%5InASGyy-6o7O5~$oClKaPtC6bitc3!iwg+EZU>K$}w%D z*cEv#8d|G z9D*<-%#Q1iqQE61Gf+~Zh|@lT3i$NtQ&b8cAw`D>Cx|-e>Jk%++ZaJ#8)_?c=hDcz z7?QER!fJL(is?*zcTpn7Kg*a9Jf}bE-z~ZV6-Jh}?Ee`r(+guvV#GcUCLuIo9|}JSF5MYa9^BaDLF5 zcMlE#;@NaDY>1@3hBgG&O0&I>Ph@02lsibhL9pW-s4xkiGeocadZFUai@V>*5WLgG^2hu^MKzS9u!wgsRjje&X(eun;80U0hDH5C;VbSe-?-~(;T{2>~3?;#608`}V+&H*Tv*n_|y 
zIYR&BiTCOL<7dy3dX{01Selv5?u#ReAw~dSB_idBwrcis)&`Q3AR)}--=dx33s)C! z_?fc0|vaF&rkP`i_<@dqQw)Iz3E{hKtTX2 zy;^nZg4s}RYud{K*W+!)fovBX)M2oV3nHaDpYJ2$a*lLEVW$Zr` zXwI)5dvYlEEweJDAH`Epa8653_6oZ__!#;99n3O9&|a>kou1^nuTopNQtS3>3p`b~ zA~rzVhcML~Jbt+bq!O>cyN42|@84I! zyw%}!mWpA$Rjo0~%2 zLpt{GGN4t^t@c5je7d=LF)LxHQr}B;4=&HYh9M$3(>uG@Q5#Ts(!vPcsmW*yV{dyK zOX+gq4m(J97IBd6*uW|vlhry=Xb|V)Tmy?s97P;rj#Xd~>$SoeHM-#7&j4m% zH_-UXD5bPEbnK2GMqxX`01 zBpNl)M(vq0yNXVc;@_ndWL6m6xy41;dkJCK{kdjzzh0nN*OtW9ybYr9`JO2q4Duyx zm)$f+S?pa8n;Ri(cAc#H9*~u_oU@n3Y&Tb+Ms4gM;BjBoV=k7abecG)>g!Uz=aWND z-TQkT(h3Uj1wltp&p;>CVVh8c2{k`A2c`5w*KGs^;diEGP~%|V0qmvmzD$(P7_N^L zqQixK9Q~_~JPoh(SH%3I^G4=95mO^0Tzt%LuIevhdklUvyS5m!@Hsi~uwHyl48}qh zg~go!n|I+$mSDTd_4fWJLxY0=<^hRQSMTv68a@R`hZ+me`6-Ht-8#7%<=Yd=t`K~i zto4~7Sg)caO1Of`zin8&kA6s{EK9E+hNW;yMZ3(x%{)~vBZLYk;t`crDVs_1$;H#H zu7u!M>q0H^4}S3GYadu<4WHh@G4cz^Xt9X=*<7 z5t>!&zj#~~(IT)fx^yrhF)7G@v;PPS)eka=>LJAUvQknIG|zp{zmQA2mRe76zFeme z;wCOSBc)%9wwkPy03CDX^d_n(cQ9;A20aam?r?AKY?nQ>Z_xTz{{Cgso+8JJc-!VV z3oR9f!jScf`P~-O1xSKfpJCCeS`e9W4U8pG6}nvpKJ$oOXu|1F*vFhB%S1aV*ZS_5 z1LzlCJ?!D3!~G4HfS>@3GT)m+D~OLOS)%xSK!}zmLPZ2~iQ({y7i6+}NTLG*zYbh0 z{{(doIQ{l~D<*9F0UBHuqVkxTSBq1vqWhHxu zP^l=XND5_@RaP0;}XDQmDPf*7QM^s^*TMRzEvW?encggU@%c z2?r(+P@+ZWM#~eZNTPr=_Rl`?qhr&7a)=PG5kN14Q%=0`yk6Cf}MiP;F_?f|?~3%PYS~0_)ve zU9TXsNJNC#DD0>FUw8)jH73R}9(p|nHa2URC?_X_8cZ9?%@xQ#TX5g|`KF{~3+%Je z_I5uiFqG5)MR+&K=!44AoXf3NJb@<-?A8XI zhpVD8KmUi;R@lHVQ|}xBID~|axrf%awgcd`;XiPh{c(&pB3v+xULsD|;IJbz)p4NX z?r$8A#OdtxMJ=z9*W^Vp6$zozu?rUs|Hq7@RRp9XO7BhI)IrM|^*k#J@BA&~Bv8D? ze4iW|f=98xB`d`kTCY+LiM?zg-wy3pMf@9BOmp*5C{|!9P_X*U2ah`F|B|#{u2r}( zkin6ZlyvP56%;=OEsKxTQr~|Xg+@g0@FJI5g*i$Y-RPGPBYZ*<)Gj(_cvmDPN54Dc z8bNVJx{sW6HCWX!sHe7e2wy##bP-`;Jax46;*ydYuD07-n{LgHkZ?@it;j@{S|Dsr zK>e}yXb9rq>G6fYRZ(Q)n15*&<)>f7Q5cvXbybV5ncyL)1jQ?>TE^%+IGTl34x=HY zm;jg%ASAP4Cnqbb8nt7U=9o38bXMvQAU!^Qd{t?tlcd;;e02D1ArJ#JsE+$z^Z5-63tYI>mCseVhd==XKYDsVgCWqx`sX`u6u0;e#!8@t|MZ`guLb!m=!VgZ<9cgvXp`!MXn-GMQ_xNmbE>kl^Giy`yZn^F9M~O(?&SsI zXt0mbTQ$YZa)&3A9(rnMVDMou4G2W~@FabZtZ&mO9w_3?ZUDvB;Qb9u2R~B#>{%82 zpAVpSfq=d)f|`cLsGD(qk&W(EuB4h3ebd=m$gPQ8W3qqZ?m7!8iqXA0I_GCl>w>Ay zx9H%6MTvuUVt$^JjqO=%tm}WFE>zcZDZDq*A@~&&7q_4MMDzrH|N0d)1+LUbzg>xy zFleYl)^ptS3=f({-8a_&2O_YbaU}lPv$BxB+p%S3vOLziyf@meAbX&z3$Xi#h6aCa zzMW)b8X6iXR`FSaF8DDKx3PpAWQJExJx@*5yKteQm9zbtc+dR2DmT3-?tTy^WCUD7 zzqW(`vhgua;WFgCKtswv6QmQGk#XbRJ$VAuyqDC~51_>p60*Wh0qtiib8jc9HzKq$ zGOQ%oR+cY^SUB0sUO9a_MEioTs>Tl9sF8_@tgI}pjB^E-X0cK2`pSlW{>+DM6Ywg$ z*~fkyQsY(;!p)7slJEc`@0RA~+XGe563g=QkE75-OPBVImXv`6ZzLzj3Uv`@*e=4# z$_f}Ep#L}^8i=3qssx3`%UFpUREcM@bOmu>piqKF5seY9i~Q2kWQiLOvy1Vb2)qJQ z$gM*R9X!;ICb&W247(C*c#OM-6bLl*-y@tI@iH9^4F|z717~h-KH>gm;uyCFC2v$z zL`0{rq6}6u7bHA*>DiEY2Qg~!TOiH{CoFTxA^IjUP2><^DOTFm&h7{wWYV}V2XN$u zI4k((mW~dnnd`n$lCDYt9{95G0D|;~h6)rKWZcijV7*2>8(t7FRCO7N+wCIz%m9VM z*oQHDbWzYo0co0qIQaTMlBNZ`pOHCYQ`A3i)1zY7=@b)GEBjQt+pf!wVM9-D=+h z0Tu@tUZzOj4?X~T=T{~*w0X3nGU@73Ncxe0%?*&gnb`>BkU~N^w|h(eX55@h%Q_kM zL(D8am|QjTdp7ruyBChspXS>evSY0HP4DX@8EM5kE0Yd`Q(ul}A0qoKipr&+AVMvb zdrBu&7okG{=%77Q5Yo9|Vv>wNJcO*lu^@YzhraepYI8(%^as-dd>e!Z(a|hNC|95G zYqME7mms|=Xv>DdcF1yRRO*k4d4}C!!Nr%jQ#}v&+eI9Y z)ypg%HU~Uco_*O7a(V~p#6!dVO$*wG!=wN_XJ%^hM&LFHPw>P-NB8QOHHG5W_WMc5 z&HVxbu)LbOyDzI6s&lKQAB{4zv~-2Dm+%g61FFg8x!&&=4JmgX=Uw}7ud4Og)1<2- z3YT~7r9daDvd?YUp)Tsh^q~`!C(NFEM{Q>A+__V2xqL9rS?OE&c|*e&IXTTr-%wsd ziezgW9T73KP>3)a#6K-oIFTiusX=ZTIue|=$QGj|ToY$i3n`e{vC2xV>DoFDObCu{ zJ9X~{(<7|Z!YmOT_8pTWryl2%F{jZff=3svd4}tB>vtm;1=rvjHIvA!@ZR9t@iH8b z6ot}xP~Jz9KeB5W;COhHQ!796{FCdBmu{GNB@BAHH-4Mj`}#fdXDF>Kn_eT4Ww`%b zxm^YX2x 
zbch{^Z`X8`qmSd@J8KOY9W4RhaE5-W^yka;1!(04VG4cN4E8ac$1(h4}B$!j~m(wz1B)0^dZ2=79sxL5@=i z)e8F7{v1!*%XmwgJ*;uJU37ioFieZi{l!}L$pU_Bdnnf#iz3B%c)ZQe3BI@5uL5Ws zBnH%!cm}v#(VK59NW`@nSgAw!i2yjh@YJV4mNGUuC_Zp!YH^dUb{AbYYj6C#&(YTM z(?^%Q!0)mCj`4pQXP3KC8yb>w?dKgAJbd_zWf3Y$Sa+^nQ_cEP_~s4dq;Su=(plh^ z-k&%RL zqTBgTC)TfLzfMww<7>~mEPqw{PkZ?T+iyZ9afF+jC@b~XuJgB{PCPB7V z$rhM3Dl!?Fy(y$4f%#jZZ~5$_&cjr>kD*+rJInZfeTK@bWRY6eKWnEu_jXV^FNuOg zF=VRxSGx&`EQss*{u&6e%fp;j+6zibU~VQVSMBcAUK0mRbsz9hyo?<#9cRxqR8olD zc>@FM>C^2*=CiT!PA?o(C~}de@oqsP-*fZc{rl_SQ=CMP-u)7-QzCz3bN>Ui9Rxv_ z()A9L#h=c5&mW=C&|Ny-)`GH?jh!9oJgBFoZ}&Wjk0%n8TeO1A*1u|)Sy+(5MNz`p zL4TkboG1y^sN-v6tGScSgo0B~k;L9n95kJyUcGQgmzDtU0eK zUs?C$rAHe|IdJ9;g-BDSqzp!%^Yix#8bfEd3gp4uySMg$WC!*c-UynZayUQjTLDOa2QsuEs(%^LP`v8hd+r!B!9&I-ZBRu&i(p{GPo15F)i}Uk;zN#4h?TS01 zudgpuJ{hWEadF-JP+qo^lW$rK@vb8`c&!LoGj6`r0o-8+#>d7euXvw90`D>73y&DZ zqcl#ON=tw0?RLRx6UW-xxxf;Txn&gP=QPgLP3-E?oJ! z7|S;-QMjcSGODWFam!RHVI6`d_dr5gs76HdD5s!mxJD`=u#%)#FBvuL{6i73!wX?N z+*3lnPY;QTYC3BDJeI19GUk3&F*=~&v=e{CR<5(Y$4g5{Pg_PH!6uZP#{7qyWb*)3lSK$2mI9-o^U-uxEy+FUmu^9 zertFYhy^|xo89I?^TWfE!#(l4mneJ7#X55?w(X*0c#$hgR(IB9<16s8)dgB?u>Eo+ zNIfMcLiLq-J5OD{aQIMS{rCgXW$o2DUdFFqy9^xSFCX|jNSW9b|y$)$h)s!{DDCe<~K zg3Kp{zZ+dPJ1*Pc;AW68W%D~wWLCnf2|Wr*EpiDB7r7#ri9f0Toc74I?+7?~VQhFY z+1Nc!knuEkjey5>N1$=ao*PX=_F!~cSdm-M#tzCWFCw$<%LF+f&3(RGg zdWY0s^Ce9!UViI;JTF6%nnPCiC)M3L+}-x?yNXqmTc0nH5S+M8PI8W*y^fJDI&NSf zePATf(#f&`jvEohA(X5TIO2FmCa(UWJ%kj*@!-YDIdE#E-wb!`;vI?CSIQTa|M>pS zW~i?A!tV~&U}{fuJ23> zR+bIP8`en!3JQKMtSB=HjE6sJdMSF2G^@m&P2r|6Z6H$_X(G3X$mZI6uRTttKt%UC!# z8U?m9uGa??JDnf@Q1*LLaA#$VWQjdzRmQB+%t7j*b=uiGIwdt1)~u`A?5SV;eNyn` z--zi*)B&&kw_SJ&>>m8fT*%4G^H>}WPE72_(N!h_uL%VeRfX5KGKxOb#^^9&j>My& zr%dxNMpg#yJ3JyFgaA@faqFn448p(M(2!A4vG%_#2~9=|ZpMFuDW_w_z1JTL+1tzZ zeTnJ#8ZgE6BG{up_I$^!al*Tw;x*?E+sg1K+UUqK?1x-W_&PNWgAwd4U^-Tk-yuIA z@tRUd>VoYLFjp9Gk+6{>Zu%o65yZ4S^ivksu5ow7ox6B(8Nn0iAV3FUiEm-~0weXy zv7i%rVSZdG#g1n1a1{?QT2s5hGWT`1Z+c_qe>R;=&fePeoR3=)_ zKtPZH0y;n{Rb?f-jBnzEqxgSXfR`^}6ol;=395+C*+wyf+9(uIS-A?*Urv;l9sw$f%3emadmNJz7-~S@KU3VIIsKBk=m-WcuCoD)-T)1 zd&7BhauSENLzobA3j|=Gk5@NkPo6yaKEEw68{P9KPRq<6XN?~U%G~~z&|_FTk?9je z7sT=n8D=nFA=U3;`JL^Y$;S<*IL~nRLj)45a}#*;pzBZF?8k2mqXSF>Updg2e;l~f zGo`h3d#G~V)pLipeH=`q`ubmy76R>$shQc#%nVejAMcl83`jw~9CLKT%fdlZduxJ<3Jq8B?6bFBztSnZWZ+d3t&$-tl_GReGcb~4` zexAZ;R+3%YnIJn5aHQ?hxzh}J+8Qz?_9tyFT|WM}Hz(F_DZ2SQzs1c3mdHnJWS40EYW1g$~zre=tqhS%poQ&KV8PZcK;DMbBfTshd{8GxC{TJ^dwwLwc87? 
zPs%V3AixofmF-w2vuvd6a9m*wod`&!=CmER7x33VHoli0wf4*{tkrif0KX zd(!!lmxH!?FEg|K`QyLsp+xBF+T&xopBHli@V2eKWIC9r(H#kLP0Pyq{$fpJuWg9{ z#a^RRU$S1C9KE#_axg_w_mrc;$wcy1a$vtG-tfo=-r5N8nBlAAicc_5y}yIu&a?P< zB$yZ}<-)iQF7naIgTHw!f2Rd8`LYKe()sf{rQC4s<9N3>Nm~1{T?~hUbUq^R3yBGc zt{<75ecU**I_NXAgI-$;C5y00^#;nr+cO2Q#-lJT=r8msWlpRm_d}Km`;+^p}fUu7Y87MTas1`B2fer#VSv|c7MY<8bDb&b7I;Nzt z*=es!B>mD zftOz%71`pW0$_llvs6o$m;m;g=iikGc};#wfGJobLTIVAGsF)dP`VK-JNuu#AH9QA z0EuQ7vwPnW{lrZ|P_>d4m?$XxBid=*_2b717{d=AG{FNSp4{-py%T|)HaPeo$ok@u z*`A&3ww7$(TQq*hOFqPL-_Y3DLAAVl>K6P0ii!#MXUE36TF;i@fI}7uQKDNvr0Duf z_0*}{3xW8nvBdy2A3J?I58MTAb(#8O+{3C+%mJWA<}S{BgtEVV>jA4gq>q7tT3Q|U z^bV>gPFMkq5@bFJJf-Lv27Vk(f4W_O?ju3x&o4KbdBqjC(`QUZ-vdP$16O?q_@*P# zYk(k;A0=T(+D9RY6-)fpJ9pMq4^wFT;81XWtj&i~9Otp#`SX9E=!IYjRUAn34W}HQ z@KE3mf^FMktJfSP<&8C_DF_<#JHj}XTAG?Hd(UvwgNebWmlv~jz+d`*$tZ+;`=L4* z&6DKr1gIoQ0>1DgAS5`(chCA8%hrZVs zKt7Y18BjljdNpgjXATdr@4=lXKx8LrZ14wsW`)!db)s zhnE%JH=Jv<5=P9PMN97mgQ^1va0?hyLpy#6n+w2Os^ViZlPE>4k+{b-qB z^#QMYA0G%2aey)I-@gymq9hH)9;}S_5TuAspGx|Tw+=rt#7L-S5B|S=PL9O?kI$hE zJV399Wre&c0>Q&$6Ziy#B)BOvf$2+0Ibj8n?jv1nJ!pSlw^_PB zdNyAVyTDn zz6#0;g0v?>=gB0i99_*lrJ@L->_s2H}9y`4F$M|sZ zTuA$q#TU@zxI7w9dhs%|BZ(t^>x>L3 ztIz4cSjl(o5`ZcD=FJc9-xnP|d6B7DM%yA>Z=E*t`l)x{&rN$57JSgm`j$s$RBih7 zQr^>Jo-z;)Ep!H|a%0&C8E^p{fuIhtMBNbRV67Vo@EZC)J7T7JE_zZ1PKZ;nBNd&diFg#@!O0A1 zNy#8XA&T@sNonbdus_oK6mLzpa8Z$y`-f)EBIZJezy7JY_Mo^+^I{D1 zo}N%K!T}P)X)(>y((;bC#uhUu&T3VB;bdVkxN-&5)L$5~jvhKR4cGxxPG202w`wf9 zZk+9OD&F?tPgM60rroya8JXl+IUsNoTyp-K-5MBIq|fq*+}^!ZYTsDTwDl+>eLFGSes{0>#?jR4N3!oI6o+YP z0&1bO3c~raa^{)5>Ey2`Ofsjbjtoe8e81;(#c?Y6u*{6j4D#{LXpgA9PhMPfgn+GN z?vii`AlTL~cKsC@uU~hxZfS{`RtNqZxTFF;*68zp2M;}De6+cCyC;9#R7d8f z7scwvYGL4`c=D`4+IC9WB`=fAh$k`Ab*9MR?4%+XV zVWi$0>UT#OI!=5M{CKMS#g2o!qtcIVDaS_pY}`d%2j~>b0R;<8thn&7LQthV3=1>C z03w!0N^Xnk@M>TNN!jg*Uze0mwZ9wj#72|ph))66w{LEIDD>N%CsMaceWkAo>EHgE z%D!A;bBm_8;`jq~bx$f-1?6#PB27~#PQrh^t@Fo^vTL>Ahad7W*~4_`C3SV*zBm+C zCH>ee#C&;uePYf(e^j^aiaqFEZEXr~RaxZMCM8|Pz_Y2#Tp=`f$I09FAq!MY*))(s zAyMnpsohb{vzPm17%e5A=^i~%cu5$!^j~k^#%?nDg*K-Pf3z=`p3Smt6icoQ*?302(NH9dE-wvs%o)R|TEEwzXJ)gDy&%wJ1$_-%zVh}oA2PQ74cR0*#K07Rgg8M=bdiRCfc7XdbH!8zh8HUn(T<0i6Q z@q!~1{7W^~V_*5)s5_jAN?>JTkCK~j+H!olA z296FvC0a2+em{)gt}~t$m?|^6NG6#7^5yJ;&G)L&iZjQ=0uMfplwOr~dsQH97YmUdK0N=u;kxNb5{01k_f2lE7n^sOI|W?*kXT8bA#<_*cCM@Lq7 z!+;KzLy_@?k=nc#{D&{)Z5g#QY>7UW)mpXr`*aq5MNfIPUk&>Db`R;4QjpFW{2ti5 zay3!3Jnt;4Od+O_MA5u7eyJ_t)85bSvR*>2cZ$u|^0K?E-olpe`cm}^M?Di06`M>i zuK?de;xWf_o71}VQ`{x%P2!2-5*2-!}M2Fe24u69GPV25l@+)Z;z@1)z!Gc6reuA(n)z4AK#J}%KCX~?wg1LG(*QTD8rE0JP_jbuQ-2dw)-$T3#MZE zJcxNIT!^2bC*7ovFoQ9m6>Z|ny zsl3Z-jJsy4)z_%O$fNi?rLAr6;XR&`6cQi!o~g27rB6b{kV>}wBDW`XQ3A4q!CfB0}J=h!{>?; zq4aM2kp_$H_PNn_tlUE6KXk5KiJe%X{FKP}pOOPB3F-@?eFHEQuuvRH3J9n}|L+(> zft8DI)9tmF(C!LTyPnT@i_t12TXKjO`Uh_MY!OLGszwq?W8(wIPJC=@gESs0l=)GEkB28Z z8*fq5G;b;{eB?Ky%sbZh(EafFk?iL_eItIV^1H15k#Nugb-{~I|F)l&+aBX$lfcU_ z?mZ}oJ8^GuuU}#uhrx|Ii6943G#5A^<8!i+d11fvy6*OBuee;%y>XYTZr`dwzFJ@= z;n2{Kpkz;zC91kL6B7z^v^9VMhv*`0hSRa6G986Rv9T%`x zE{bV*p*R7!0VKs^_Bf?CMQej@pWw!sPPUY8CzbcEq022P(T;>PrC)UI8NNC?>$d`~ zL*BKZlB+R%eD^w67M(<8rpEAU75N{H`|G{=*F7#=xZ&vuroKXU)DSv{m#=a@Hm~X~ zZpH)2NO>RN=ZBJ0>{9+uu;wTf;O2(kG0myZ(jT8RH}`OnOnCLKoMxEoy7RC5hJ}Se z!a>7@KWm&UytD6qr=<1cQUWW=r^Mhd+7p_48-OvYH!!LlC(UpPZZ=8*6#< zW{GvzA=Fkx-=OPnHI6HPx#{)1n?_NNVEtn`Bdli*x-;mzaIWC6!Ddi7)zh+KK{Fz- za<~7r0>`6=RRiIA#}?B5ow(J1fB(_v=z^)7mp&eQ-iawePcMu$96UZ)?IrSB`0w&^ zVC8!cYASz6XJ=@zZlS=#S%aDyJ`h3XhuMFDA@4+Mi}?yxR#V{oAVWhxNwiY|p^%j= zgFg+})sI8}lFG8A7O7r<3Ef}PErJf5mP2X7Vk}DR$-hmCOC0}-dzL!N0&HZ=$Q2|V 
zaU$WRAtg^n(1nzpfuR626Ye^&1=kDuwFhavH%!qmq4-+*^9OlCFd_G4A31TN8BSAF zdkr5R&fa`T&k|oKm*&_}eV~qhUZrBx`YGTp6mMsLrBUoN`%ch#t6A+4?J z+|kc?e*Q<~H4;)gExookOQ8)%cYxqWUq&4sL8lDZycSR zqyp=4?SR^Vy+TIjViViH=(ky|#dmf5Cvv19C-bp0q1!L(`q#8AWA807~)8HG^; z=bZ(&x#8P`^GdEg7TdQ;y+2vh%FsImwBN)eq$es``iB8(um5;XM#``Y4LP)7AU#4W z=TPv;+5dd^UwY>kC`EW(Ts>gJ1IB@vQ8c$b)nHAV4_iV)1J0S@;qATB>>RQeoj0u2 zH~PtJS!5~FJ1!MlcM)T@7tq}e%Cu}SLoxx(1AR3T(>I0BM^;wxezsnKYa=$CjVNk53qU6lX_xN@A?uUbFGSDs}1Ibzss5W_f#c} zwLebJ__giYr)M==_4-L=$IM7_M0`%9f{PXqV}|)mRgG5gWzZ2qwq#fw#wK+LGm@XD zb=DCZmdz65ZaF#NCf98YAbknepbD@0dsk+mD$`QgaCW z0fyn+c=vz=+;bcp;!wYy;ET-27y~a8k+446%mmN`QV_xe09si4C{TNx*TJyBX)W*K zU9a6FW>id*CGW915WRKAYIAllQn64faUe%7ea-HrRV1s+&mUc3$qy0RMK(vHF-i=@ z0uX-tK9B=-^}P)5)o>lPciwJO?~WL>xN=M#ByEb zNn|7rJSb2w0XGA)dvPsRO8Y|5YHkjS_}9(812{Wzkvx0GhU0R}QVS+Zh#1^OJ&}9w zTRS;_xz4xtT8eHqr#n4QE#ii$11|X=p_+=MEap%mq@aJtt3UFy$Geul@y!F3C>t?2 z$LgfPXTuVH`^ZU-eS-RI;cGSWfr6BZPfzZ7s(b~2*xmJk9n1vft0Qpq9M5eN+nJ`I zZEl|a_AOE5;OfEwXc{l5SY#|?NiGu_xXc)$S^75WYMH&a(TA$EGz4DqBJC0r zP3In3TUx@8pLQ+>nUb2C0Y-hWT8t;{J2X_=rR6?uJ?q6#%)8CQ=W$OAxtR$@%G2On z(Xgn{@gTBHSXf_I7jtAL#{`uQY|a_|Y}w0_r;xtUdl9Y73qscs(NIb+WB%UN%|dmf z$c9&|12vw^}J+uqG$wg47NPr;~ptFS4B^BC4HoCef$w2Wgqn7kLGI1CvL6&nZW;C+$xY(r>d&z=7#(XV6ua-6qeCip~%!f zbm8*q_Vht$pTOEU&(V-Ap2q+vz&KKbceo*cxhh6(YbWy@3{EvMCTB6(b?9D74pl;Y zm?r$FEDLe3!yqT>;Ou|vvF`kNM&=}y^?BaN<1@-h2?=fD`*+xHE7SiB7XG4#8Zm%h zOBZ(?@M2iJ`(VMuVNh1d58XC2sep_Ze^1FP6++txPg^4t_)t|rh71AhS=<2xdniD# z!HHv83JL%n;)USD!3rmYYgj>-{dVITeNfZol3Q`v7cARjJDhELgh9X0ki_+xyeYRKABHA??l0-9=VRVv{{!*m`={2;2t_5a$|UYA=7( zJ{A^f&kfgtp3DH%dl;D1N*vm&>|v;@3wZ(J(9t()tY|p74!*Kb50H8m!k7DOc&27Z zS-EUwBRKmBgF0whT^@GTZmpE-2>wT00U~u0oF5s!HVLF%n8lM1u2)uTWS3-9g3ZIA z+W|6Hyvi3s0V<@#N=BV{z!uW~hYG`;EKvEbs1(y}Ad7>it%S?HnNe)9JWI9&a;Ta<%tzk19Z zf1OHsdH(8>^RXtbH;!+vCM8jv4J_oqkBW_p!(`hGOrx*)4Z|4>=A>eeXjc#Byn3a) z-y^z8mNvFxhsfczmD_)JzG^(H5v-eWKz z?5G>Y96^SQ?FzrmdBG=${UJI>XJ!rri#||&Q&{-h@TR^G$7}!0g_X7b1A0?m`ucP- ziU{P@)SJ^E8eDcqoO-~&i$8+RL=z)pFjG&%kK|u?G6e)>H~-4K zyZDCO*w|cBH}_AWAHrWAKTdGJeHoPD<`?~?Z$DzcPMz4^B`HTAM86Lh1Tk0gXWd)&f*V*5l^x3%R7t?4FsKSzZQla#g;oJHc#Z_)y zc8Y4g6N6O!`vybIM?a0Ka;sV3_J&@I9{g`I9W&J)ug%5T)l*3?&ovAaWu#|JWb0`x znD^`+?>NAC!+E2;_+@&mxxD248Vb?*X7BeLee3h*qtHTRcj4{UOnuqRJZm?tXZiiC6_Gs$9s3KRP*zryzQ@#` zADNzh_TfV&_I-Z3qJ}%f!5A#w^zkEdsU4i0##o*q*y0~<^o|6%18_p9FhTqo*7(S9 zH_5N4I1aWDwyK+M7skT$o{O}%cij%iHY>1{jz>`+vo){j>Kg0gGd$W{-5T?nK9E(k z$?hZe07QBy{o4-z!Q5&{n)kv>L{E?UmD}*|d$L1ME8H6%zW;9Z@QsAcMUlX2_hGvD zb`U4c9UaJtoqtuiefFl~(%+}jwr^bQIf5EW&TE4{tMV{n^%&16_+9YxXE@l5ZMF{b z@Z=}ZX_J$Y{fc?2@w^k~EzF7=D2!4N_Ji6coBGppOGAeMGVe?-rtR38Utw7h3|hzY z_NJ?&pEv&SczZx~ESt@t+IIhSi#s#Z2S=Nmyx!>QT3Px0(T>8cK_K|6b@xnbaVBqW zjIE`Q8qv}^!?C_#M#DqTi8N*7ZAri&5#J42n8wL=mRQn^2|wCVf0Gcl=bFdo;PY9q z)M*g^wyoGwS+588%_e&@AHS^it2s~e)NbbQ8WDGwSr3>$;vk$ z8~!<)KyMH^TRq-0r_gk8!+f_*ziHR>kq;lNnfP6+T_ZrV^AUge{=V5AU?-@ksU;!n z=)~+lE6fo>lDg@+2%S;w5r>qqZAMb>=zB+gxB3*}{=Xy3@nGyQK}@J$jsHN*ufy>i z@Txc%R65Jx@{m4w@N{8lVbeSP?7mVt z-|d_tp*YX3?oQ5fKa;*um@_RDs_$Q{vhG^M2BB2<)YI!+`zb}osm$>r+tx2aI_6#H zkEeTbxGpADuK1qmt~%fjd^^8s%I*;cA)$-`DDL2Jgyj)*`6xd`qwzzsgWwW%bNWuAxw6x)Wy5Y|~q(dm4w>*zl z?I||ZG5$Tdy)I!v5`OlTO2LiB;zvgcewU_x`M2;G^%w#;mSsWHA!H8-fOeI4jEIqe zq2WDriKrZ5iG!D-SVA93w8*=pMgZ-ZGtLeU&;{pNs(-ovZe}73lYt)(f}0h-!ZuMT89ijnv{0D}B5duv?b%@X5iZ;BnXfg>X$Ok7+Y9_29D zfCGh|1SI@171tm6vL4F7?3wUOz zz`TqH3?Pvl`kZkJTn*njnK#!DkOW4tO7-v}brXtiZc_D&yVeD*U2N@Kg}cNP|NOdc zZT$i;m@05UXj&OL7@aoKrSGM1Xi*YAhQut>&Z`uecSr75sK4$kc0DRKmX(WZVPT;L zObEI$NGK85GlCTZjF^}+jS!Ov?L4#ngPri8+W1jPKNKw}u*ek<*^GJ)N%_~k zkPn-24sBsL9na%as{BzoBBNfjG<#2f5nEn_I}c)t!1}3kY9_G>uyA7h2QPYWU;@xt 
z5vgOemc6#rJI%1?QK4dzMAqSa(_z$Mp-=;P*F+ArD{{R%Hz-PtD>{U(LtO#(C>n|cWk(pL5V|__*p~_ zdm1%MRgAer`GGsd00sXqhwb?UC6GGgpW%t3+aDmcqjQvS-ksXgcum|@@#;_h2_UT; z!`mW$_U7y0?Wr~j?N8Hst4_YLfIU5Zs_?4PSu(+*D#x1EPCeZD6j}^rJ(yo^Xb^fe zj!vIu+v<>~#jRU9e2hq%jcn|$rXUZ&TZ0Z6%6#}rKpL-2Bzu+g-iCSxfYY9@br+2S zeoSM~6{Uv0`;~;mw4Jp5o{WCZ`%+U@Cx4rvDWxZu#~|djpWf;vf-;`>afGK*sO!C| zbe@0p`t{xt+BwISPYfML2Es&dR(t6do7FS0+w>~0NXrl5v3-k{{+`CDqL?G+B<`HT za6-5I5LOgM8N01-^i5=O%5GU(S%KRBopb@YphZPJIqk(j8)0q6`f@RqIKuiE+OYC+ zxjx-l4&27!PF&(!?KH>iNV>x`W;(CUOfLH5v@7d3hgxPQu6<`s76K$hpQ>ym?bni! zM7wB$;(+_WnYHnrUo<>TXBT!y5wjOYN8c+!q5K{kSkK2A(5eVJ*~i9qNKvY&_-T!o zm_=dt+r59cZa+I>P+^qSbm}MbFFzcxSUz%KTt2a)R9E<%7-(7CoCQnjjd=2W{%iKF*f6=-BKj%2Cub@;5t?LQQeCE5z%J036Pq zGA8p5be}izw;Nu%1kXGM=Ap&{{tCsy4gx^KP|i((-QAE!fnog%oCpugTW~x6C=#bA zX?M&zk%*o3oe-!^ieMCUuFRG2P)8fBXC?PbftoJ^lij~2&Q;uU3Zh?#GJbQndAOea z;^d&QNMV1S#E5p^(?9(!EI;<{wt;-BYC;U;FbusHFJ@)0B*$Rj#IvIoXEZd(Xai?d zKd7J&;mq}}P02|Ag|{&GDiDH{(Rhx*Y8JYv$$$Gb7z8jDHU+&!`cZ#1nB?AHt2h!d z41M+96VI3@uQgL?#h{3ospS=mYBm>VMc!bgfFrJ18ip+46Pq3M!qpwy|;73C+hf zTWdD32IE!|MwEg5Pl5}Rf`EVpmO2hLasm>0aSB^mZ=HYSiTel*!hFVJ%~1#sN}@BZ z8vYiwRT2nQWX?b6b1^$P=wvbs+E3ol?rM;c(UodDGpo7hj5h)UNJ+=Z$l4eN@4=km zt@+#`tbA9VfjDqSaZJa%zy3=7(*5$avsp*YZ`|nh7%(rm)byPr=#9p`6X#vDvfjj^ zuBXrfY^`)u4jM+^`5)rfWTCKAx@0``jDzn~$6VU0S0By4tQcK-kohWgAl@m-*7%Kq zeJRP02mV{jV(&=r9z1>!&j6hNeY!`0C`z8aNmf|)if94`s;v`>`UlED@XNqq8}I8c z{{B6*nkOYK{qNib3Kc=->EDS)UYv>?_a{iG{GScWICVX|!Mjw35UJrOuXlarf3Uqg zp0s`P)CKYQP~KEAoiiXn5U}<^2^x9m3vHbyh}`(GW3}MO(BvOHxSK_I1>PWSZG~J8 zr84qAmbRHN`Gwk^gHCVNAibp8^SI`*Qq2j1Zf9S1MuxvV^HR-S8)m`-P}DrP0?P+& z^qwLDM~S}I%%co>5SPiPdKSeVT4MTFqPlIQ@M5B8icD|K+s<7fnqllSaZB7r`!1yr z9yVN;JUJ?|VJ5^DGQYWb1JR5y*d;C>tENT6mpXID#HVpY=A{vPl{NWt!P%k~YIfE+WFbfmSqQUE{A@xI|SIWc@Oklf?E*UT_ z#xH_$bp7Z9hMty=Zm(usW=C=>4UNLajI&L1Q$zpl`mvhLilg_g-bsA;P{MJ+Q#!uC za4Zq4NSpDRwS_?VX>0%wzd{Z3jEo%3csQypwLr_kWi_2TJUh9-6R40J!*)jf|HOllfi zUZL~3QvI%fXyw>SVBIzG=HRddr4QeZuaeNRx8Jnw%YPLm%jEg~596}``a3Z4uwTGv zo0yQ0gz*?~f!N;1oNhE#1Ol#+Ars_dJbtVbw%1;sXGpIk>Pu7TKyhiQ*8DCBND~*n zj;BT2zO?GI5loc6vvY9^>?tuDU~x-PoVBg%M=sZ=W)s%#TfaK&p3z)deN5M#Epzd; z$y7Jj6lf6OVhgpwia?UGs>05iC~hds*DZgxYmFA03n{{oLJNzD0d0)FIqUv!)OftN zwZLMr#$DM-{$zG3tAqN*i)bGWTo(asLKomKpaKsff>rCHusHvLVxT-r#0}Bt1f^%i z#a+H}Lhr*Pt&W9(?5(RsZX@^jEHD;Zb4&*#eLL(PV>ie5mR=1H51)){ z!{LtKoRru~x6@ZoS9b+|LouaDwATOt7XFeD5b68%+qcE}8sua!1sOfR1U?p7Xsb1% zERX&|XF62AeJf^K#C-S*fW*Ko>|?Ns3xgS>uEG3)EnquaQ^w{FVcb9Z-h4 zHC+5r&0<_bT)Pf_ZO(aDe($f^xy6vAxk)|&4{K{vm!flneSO0CqkW_HvK-8DrfT{h zc$)4R9k&&}cD{0q|5U0j)RAK6`{1ayo!yPAp z8Gs)IjZ7U#X~w_OYjgdsd}2A{M?_!USMt-+)J#mgnj;SatVsJd$@Onx5E|>UoFa`= z!2Ju%8bND_!Q;TnN}Q|QIUNOt0_i26K3y|4Wn^adt0HY*I04!egOwgXZbM}}w%3SCSI1FpSTDis^g3M{?$|WLf<|WA(N$=oVx?~zR(ik*?>g49hb`nL zLJc`M`b1WtCqbPR_e-~oJyj`sHb&`V*(_wuWv zqhH0G6%^rUPhfh|44oBs&o_qC8>tnJj)LU;R>F&(1n{_-pIZpi;v=onq9>~MrW5I+NSZkA3Q`e_NI0#{VX>m4R1 z5Riw5H*NDY_7opr7+HDw%&aWHbw7UmK;8Y&AQ?ah-aKRQ=)%G_GOVsj?AETlg0Z`T zf|lSeD**k$DEohq|6Brm6P+HkuTUi;MnCN&WH3J%y7y;Aky*K*| zA0r5avl-`l|4orWpM`(H?>LP7;eN#uj54X7DMGq}{7;10#GWoU8bPx=AbYetd`W-FdRGg@1l`smcV z)w!XYAAfQJhUu^NFe+NJmrcN83D-_XA~QyrUHV#K4+SsaNj__lE&Q0e?(Vqz_~;gr zY^Srn9M6rEHM6lnT$$DUZychfx4J)<&^>BE#Zck;o0kA)+U3+1kOqJUtx6a}aRl?X zdUp|Y@f1%tY+r|V0jlP{5fnghjh9U0*AE;#_!KwV(N!!X^eKHyL#X~_IbPwreEtz` zE=aTc*WTg{A*KV&0)|r<8DWki)$ZL!e_?uozFcnk2P8tm!qnv(0RcPF(BQanl*L4Q zscY|ufgFo2*9yC_Iw3F|u3lPRrlX-bE}9$&dJ2SFkB1GCYW{-(D$C3FE4t=0$b(tBOB6Fijf)?-OHhMhtSTd*`q^(5PvjD=$4LCG|9>*9({xxW|pBI&S}$ zQbRnZUJ-Z2uhtGsb(U(@M)En@crYN?0Z5D{YSoNaF(Ixm9IVa-h~xothsvY#L>yr^ zo)zeDLnEU*X3?0m2RIMMb?=KX(2MBh`dq2L-O})y^zdM5-P|X|G)W~1x 
zva7i;^{TYu#hS;Mqufw`C)iBPG~ZqvY{658ldySP@AWb(iG{4&UiQnX0S(zS_r^FI6{N?bW~x<{&!SwnxmNZfTNj=O$bsu~1GM>iAofEx z+^-kM0&1_p>c?rw#u>JklXEMkX5$9y-*RkG$Wd@NBSr*y#!>(xfCG6dCb=pf`dLTG zkoV#uouG;5>R9QZXX}vt&b_G#95lPDy_TxTjtEoG&^X<>gGBBL$MVhBKF+I`WY3`OA6{KLtn!;99iW8k(9!5$OB(e(tAel)w*kOw^k}P>Ut+Et*_yf_no9BIdrM zZwFNea3)#NP2%yIdmCf{=v*0dtHEsIYa1n{^5=zC9;Xv96b;|H9p$~Jbh#-!i0G|Ao&`TY>U>mWa23E~Vy3!?;w@M-Eem4}N=sdE{GL_W zBxWnZ9U~x+<^4@=6`JZ||Z%lW?<7f(<6kCdr$r<}_{|HXOrsq)ROjR_Sk zP0e9hTtdj)a0f5u3YC^{RI@#Yz3-1Ev{d+&YQ`yjq7cbUtvo$)D$&ohxMq!Jy z5REf!5sPcs+z!7j+0a0L|5_+#gZ&yu5L{*x@h2yqqbxbBb*yJI9+HD0gJ}~~Delw? z-w7^&SGpPkGw%)jOh@D(=hutuyE-zlsl_zCJ9HjrD0%NfbY_#P3ukK<%V z{(*u<3FCotoaj$c?R`w&ywL*Q+mEay&LLi-Uy5@qAkTmiQ*WU z7syCdt;t=X|0#PFLzQ}ZkX`cQPrZ!aPAdO2MBBIDtgNW0V=gunVW!f0%zBJgG)4aq zbb~lwb}@9Kct%OHvtGQe7gyjTj9W zCn)3FIYDW><@&8F>55U{4h{u*c_K#~db+!XBD8;Q8R0$BQxL75@$7ofx6UJrKdBd` zGImPWry4}@=jRji@be0tlPj8!;lsZV_b#Es|C5;psq*h4HUjEo5_0GZyjgj}|MxrS zy>)1b9YX+xxg)V-IHK`scw{8dXxIBsJS>l%r=^u$c@^EU7n_xliAn5A8L~|@WP^@# zuc;$a1S5rsv&Tc)Kbd$y7Awg{^uhaH&49P8a-o_y>Y}w3IObZ)cTxiKr_LiiCOB_- zko;b2r#Wh7$%TJf1Onr~6B@_#k?G88E27NoSPnP#7pVO__8`{@LGLP1dSaJ7P+a`i zl6L&j&QWoy--DrQsj>jRRk>NCb5R>xQY!tr8h$bZL)8@kGMbq^*Wh)lsY7A85va03 zQsK?5efl&In)&s&pTvC>PuQSD{S)E-OGOx{RIo-GTw+Wl(gXfzyHTn$moXiY?8CRN zUNhYH_C6Zzf9RzF_$(Ng=?YfeojRwZLoIjY;KAedQBz)0$=S^ej~eg{kz3N+s~|7W zXsd$3(>N~<#sQzTR}ySBQFn4FlVHW5b;`@TkME#vzRC zas4~}RFylwu<*<9uPSgD8AMc16S-tHUW1=L{XdnRdpwkR-^VYcQAx-aVjZ^FY==qO zXmd!~y5&Z)wXMpqDMd(aDHCE9?NY5o2ZqW@>aI3zbXdohS#8Lo?VvC!Z8TJy=JOkg~xL{Zr=1scMqc4eE4waR2%(d?ASGp z#*T{x0TU;%EZ7m9midtgcAc``we0&rK@ysKANd~CqBP~dwK9&^){Eu+uP1vgme z?w;xK5X+aO?32jPfN4)Ot>Y*2PjBfYwQ0Uf2t20}i7HCR3Fc{Xy7hUMda}ALn>RNr zpB-INNC5$Bj1H~4yPIPVmQo&;LR#(@%a@0Sh4plFG&VO6l{sa0G6=r7XKb>cY9nM0@lmM|gJ~{hTPP_6COd+0fy|LCp1kclwP5XMNKz3DG zejrU<;5ozU-t6+F42nqZhyoS^E|Wjmw-|HdaKZkYCk9UgnF)~E!6LP=un+`CM@O6V z-`XPRbdolIc`i`1#FS>dXr^gIPBRU_NmN*LX%9jIWz&Y_ywZsh9|lzz^tl zaLzKug7473YL3R|1|%WOPLp$4Jghb{q3XW$`AESf+9gE0vgV@QrirzI=g`EsoZ9wK zp`b*cyO0kApiTL_@v$1+qewQR--FOIkbp9m2de-ok~)f+(t7fX!laGZw@;oqL-Wka zDDZv4kvgpIKK-VKupD{h@2e|qAier9E^73@cbY9bDlduZ4P-RVrUC@4izx@G$1Pi` ztJffZQ1nI22*g9wvb6;2U?c7a{=6MkurV2 z&*+c|b=;LJ=sKZ6(!f@A%UcKASxn3T{$BfW(S?G7iPNW#Uj4FK@%YITvK)D67moiy zsf>V~_4OSxqU^TbBb#)0?201MuGn|aW%jAxxp5$hvC{ebdp-7Fxl*SI>CEc$erkA~@pRo2^2eo8=DY=tD=S+nr=yI+psnx-RxZ zcUe6mM$YX&c zaZsVaYot2q{vKAnfl*hJtwh3JnaoRp4)NBtYnmVOxY`_@ zvSlaYp`?yUa?{XA;M|S6OK}Uc2G(r2n5Fglo{i`mNsg#gDoq-93IorK886|W`El-f`k!Yqy~r4iHZsVs zPFuYn#9U_;{x!oT2vDu6D;#7049I6_K42QKYUu{@-E0*wno(;9R4Cfoh@IL>#=e~hhzIs>G62EDP` z*O;c9IWtxJt@@oddU)qI%1S=S!Q=*B2a(=n!`_&#tqFQos=byM&K6A2Q@*>ljcS$C zkkW=X_R7d@3Ncxrk3kQs#REuva&MvN9?&a z=>sjL@6sO!Aj+TSYx=R{pk?vFl)Gd19X{clCD(l}KWMVUMG1Ay*$hltvH$PCH>cPd zZ@1f1U3mKRg#0TpZ;huMkM;iILPt+e_Wu1tgeWfW0fbIxR=$1xNX8^Ijxzhm@zlW0 z_Dhe=V%18WN?Qs+KG?@)^#b7)viFC+^m#8vgD@Ir;gb=rQqJx0L(^bJc`0 zaHfIVFUg6Hd3fDODeErl=q)PT(Vz3;n*^BOd&L_%p2fZ&1mq5A#wwK@7_m5r5oa=) znFr4}pICJmk#x$hGe6lrGQCx$mj>see{c01I(fK|)4O|P&s_)34*I&lBSp9%EL$on zjsizGM{c`$ekuvAYQkRs7NP%ZmKTe`CpLxG_>881r)~^n9ban>O{o^o$aYfdD{P_4jOEp2l zo)Znmdg&V$5wG=VTSG&i(uOnr0E1U9nezyx=mdZ?MtSbQ0;g0eN!pvd?59=r7j9jR z-a>_owOm#g9vXW0Vz`W(oS$zknA_R?QC}YcAp~}qm6bI=)uiin>qlwwhc~h=huxdC zZ$S~>fGHU~`L|b2wL@e;b_`!_rkx#;#6XPaCo5u`tDya ze&EjicRx;(eM$z${{05TAW*AF40i_HqMJ)K{p@GP&1Wn)AHCC(k# zX9g`Zed3LFJ4+P9X6!(ttf)vM(2(4HqQao?eyHsPj{se#suu*2NaQ4g|J=-g9-@YM zpy;gHCmE+~21L*tF{XcT5SF8mC1h>Qw87C|zX$B2<%^AdpOmD<98n5LU{;`M9oI_L zC}V2*Tkoaiwz(C*4f7~NBFgNC7u>ZJLy&*8W``em(LZbSvcn-A*v>DrY^ZzJ7yGmpwA5fGWk 
zERz_2`kzmc7IV;9zM^}e(IUk4_H9)tA$$2@?(X$UV1x@-Kyp$j`#q>C5sm2F7m%sgTsNB)XCY>Qa#2! z@2aWpTrX6XI|JZxC?F8>F}xQ@scMYWq?(KYr@^PwY4iy8r8LJ!EHNaVeZ|o-l4WWC zooT=S5PtEuWI%o_DFF1XZxZtXZmL z8#gs3=ouLevo54tW9!l=6_ssx1N9#Jro)WLk#7SGrDA+=$?K!+Q-Lz@R&=2$3{$gy z<~7*1%S^1DR@-SjQDGWZsiXPq+|S>G@0e-8UO_1G1tUb~huWtO4h#^)!cMTIbP1AZ z0?(j0S}*0qM~!tVNuDu8y|;ljxUY6CK%XFI}1jn=WiSM`9f{{pms6#YDRj@6?Ma_&zr&8TC^Q+Akb+pe25Dut$n zI}d&Ipbm0*bx_bg)BX3ZY@KdpHT1^sENwIU2nj~agv-q^lq4k453BmnQ>tYndJDJP zN>1gPt<<0Z&cz`XT6^WMqjvPzM*kR6Jjc!a-5O%}!s(BKgV9uicN)KQQ4`=YNO;N3 z-Qk&!EnZ(|tEimP0EzxmKfgksf_|w}+o>hv^B!%0I3^9QC69~eMaFgUu~I2pPz*f^5?r;-0{ zM-=F2+WNvE%cx%`2v#qleA1UeEjsE-dpXYQkH~IHYHje+(El>xU-acVsVPt0d zZ`+`+^1hwrR&X!}g6{m*K0gcZKUe;B?tkvX%k*~hKOW{kJ^kk_sH^-4yiETcHhu)z z9x6>RFhMXWQK3(6;76ITUsT0z22k^;$OU~TN&%`u+(oSTq%LCBn~M|m71p}=mZ#3T zub*w3K1+z9i&g>P;3VT$V<#}B0YOiDu6kpayRi?GdOf!M9j>RDJlCGKcHXByGm4Xr z{h2>$Lb$cfl&Fr(bY`~Rj#TiS)OvoiTI^77I9+7&ya7s@m^1{2sK(F#wky^plg1 zqV}ZmVFl6R_#>ZHv`PwT!p^L*Y}y=ZHGXcT^$|bxvv} z9%(DuKHfVlS^xG5)WZ^pw~n1*Imc>`X%!t8KW>i@CyxZWr{^iO22a+is1)x(CDeW_ zo}s2Y6qJu%d7%8pQY;9;f0$tVp0MD>upOI!$@L^@4W~T0_>yC=`uK40{qvIKb1x?Y zqIEJrN;{cIs15-I|f`MVT@FQbLc5jV*3RyjoPp(-mKVLzh z{{1`t{aV<3Xna)X?65$SIa3TJD(f!0Ll+8vFwj!g62`V2N1|(d;XgWt z-X}e(%m7CbbTP3O8Vc8ZF-*7P2TL?jlfv?-J^U~kfHui;*~)|=Hi*(cJ!Mn)JeFw!9>hP9256beAg{98XcgI2+dRey|l zT5qwTtTG9J35mV(sSc8HwtB&K%7bnDI7}u_9 z>c{%@sX~S12&N7VZl~YbbG4@9Nn@EjGVLDM#sg7=lU0Ty#rmBcvMJ0$S)M0c3vKST zthTE<219rP-fcqdfHM_FAub76o-HA1UOxYI?14u+M$^b@0JZZ8?0jeAYn_G@jjY-?br3#&#y5a z!jO`c#VV_omXwU?e0@IkK!p}|Bed^Dhlog;QGZJM0Q;{Qi0R4^_7yIv;oZ-;78V%S zq)J!*Xe>BBTe0I){t91!kA@`A1{KeyPLytwLO03P=G(PDm7o8~(K3VbwT1Tf__JPj zJ=X!(&Aic0oDAcy<BUrmTR<~ zwG9??rz2`6)MXD#bz2+0zGD#vc<6mO7aJgv>BEYuVkcQ|vY4UOY;`G@-xJ9+H#hgN z?e>SRF&!u3lUVxr(d*Hg1mdvr=5(#n8G1^dAwmByOOXY#6!~tw{H5pwvb_W1Qjb6I zxC&9A74`$p2FGr~$PY;a!8gz0UGeuBHF@=k2%SrLUwb=&Tfxp4yPGlM0*rD7XNk!? 
zMCOgwOS!@@d#hj@@f$aF++~_^bry;$l;c|88w1dNswB(xJN5KDkJ;SMx?y)74lCzs zvc6nM>7E86=2gqNlaY}*x2pueAb%3fk>@NI>{_IOqlR*W>TTvK^i@UN`z^zKJS}`L z_NV9Gfmf6ax+A+OkHmQM#__vW3EmQHuKrzAZU`10PELWM;$ls8{M}%NV875X1DfFK zP1vkgr3+qpRrS6+2rX25m12_cC$GmU$!jz{;m87V+6D*VI_JY9k>iWkycnhm=Eu&sS5h3pL^N}{ zu9*b_$8}TF$~)NnRiCU^T2xQkZ}oI<>kNOuXx5p<77Tc+P&Rcup75lyTc>@w9Odvp zJP8<%fjt_(tz4+h+XCR2`$LzCDIaGl|If_4{6cMMr?4(GhTE|(PkeEs$ z^Qy6Xq3W7TriXDj>j*)uqpYk9TuDn*?i4aAPX45j`~CaQnMF1me~7FX!XmbT&4@Gf+w1=2GdNd ztL;wp&-&xDAE@dw*k#gow7>GCFyC(!fPJQ2FRkpLt+$wo3qr&_Xy4v8E{7+21>`9X zhG4O)NnqoT{H zeBy}2i%7X1njj7!kvWmMU$d6GzKqH-*)~>)Pg~lFyvzdnu|+T)4dBbzdh~(s)*Bh0 z{ikQ~-r2Hg5^byd%l+PAaC^#$DL5$xxbQ(iy3K{cb?qZ>+@D57PzZ{%z7oKV{bd|V zO4=grr)DPUvG$B>t|-+b;q-^RzUg*d8lIiqSX{}&A3VcBWEHWDD;}kT_oubdgLTA| zLtfF(5*D*%6t+>`tMGYVW3<@cfk8ZeY(6Je&3m)44u`s^BZK%aG_KEEw3;1G)ju=Y zbWb`jJ>0h!=C;2&G9^9QbgGe(e&2r3KK<23R?(tl({!w_%twE^?^Trr1OjCaccp5q z9Mh~8I(Q17ec&qsxUtX-EnDsC)+1}(?5f;sxkIU2L)+T?x+z!k!h|XZ3yeT-(>Sn z?5FpeY)d;;;`QXq$X5)Rls8{Uh;>pWxMh9~roYe>7xMr&?-X`M)%9X($6G~%#t&3S zoLW}j@9Aw9)QB~id%UmVVH1;4vW&dE2mzy;Ig6-_LswtP-ih=&WsB6WG3&_S+!ig) z)R-zy>`CDJ1`X@H$JssQbnj)Tt9sa95B!xz=wLNx()^I7pOxDy@@|Ksq|LouC$_=o z^<{O5_fLNhirvuHr#n7x(5Cn9k5xm$JkSPbWFuh79A=1>*tfe;sa@bI4?^DV(C{>t z;zNwd@CyXZi##TRk$U`O`qzWT>Xb61Za zmq9l|U99%{NWG;A>7fkV}>K7x%w8m zUJJi@$BhfTE^~R$AL4#$*fs53VzO8|aVpM4+gc9Aon$th_Rrt%;CW$;?+|kLG#=dF zlWNTJ+PSJ_ocM|R&AH9VCUKqjmMzX&o8u5ma5;tRnQH&nqXw7f9nj`^+Htbl{OcFa zU1#&oK)Kg>~^{L^_$-* zfVKrktF8K%I9`bfl(#$fq;Q5?{gEw|SUTyr zlSZUlf&^)7Np-_hqF$MWM9BSV#r5`hN&QwDqHf#TbNl?(Ctl^zIR+>4P|`zumFO`B z$7{9IrxR0hsAi!2vHPK*fiNnjsfpA1ri#n`yVXhtJu)6!Qp@$hAzty%pR%UE?1>&T z!iv<6MTeZ!vQ+7kTjb1*^6aNfPTrM|;osyRC#upVNo6<|t~N=13=0i+GXDsitH4s9 zW0Wdc8w_hY3n();!-aUj+n_fYT5=gmvE3bJ_`xl68O+Ak(KJ4+r@7sFJ*&M_T$mO2 z>$?P{Md#$XQ3VVUARD`(+dwnXoKz8bV*{b7c0n62v zI-U!(M%Zoge)d=?n!EViS=1c>D+8J}hH#0$Er2rw^mrfb^}%q5poRwj-8!VRE%LaP zNjvppT0kkdKU76?^$@*q1P+7oa+Cc7*X#0B{;3qMq?PfP?58#vHId3vESyHH56HMHT**?nQ$qf71{b7?JAu=8=Uc$pGR+T$Jy4y@Lkdva(0OfJvPfSZ zj~iRKjzGB%MUQpV<65#_+~pXbwu@>1pd<14rCdJUX@8=H7h22A41(HsZ~XglOX9&; z>o{^g!YPypee@+gm*ODI5vzH$sRJ+HO|t2M8Uyp>ZV(6X50OrYc5S^y$C-F%Rn#WQzMi3CL+1!cU;kM7eRp&X3Q0lKlRBfOscC zMIPRwN{^@#LKF)kgMz?y+1?}SW!>oZ^LMg56RW1Yqt)aQmOx6ze%#?+tunqbiicsK zOrBTam=^42X2uZBP$La9LrhLlzQG#|55+sOaQs3G7Jtn^g<#FEG5@Xw6orgulvfsIQ7<{*GV>)vskuW;_2 zn|8~Ex$3!vpCf!7DsdXei=7`>WAn-rYtcwFbqw5teWaaA7Jt)vo7uD2-4CRT#VZ|{k#1#o7i zgak>T97Y7FPXC0dVg!f&$@z`NKIz!h3L02ru~%D96+{v^{$#Fy1Xt8C^E&x7o>Z0p&BR4xk30CgE}Vm<(qgpKB>CaEu%s5OSqT2f1`>IKitV(hjL zQ`Xx{y-Yd~OKCo*4r}+`vk3dE4HX2lc@3VF9WOibN^Nvi-+gI`J{C~^G1Amreg@(@ z6qWS}(pBKB1!sH@zrTO*dT@HrZk2Mh)Todt|K)bY_2RIy^MD&K4|z6CS$cG|G37}t z%yxLiFw$}eg`?h9Vpy;77h{?5J&yAP^0$d1%16R@&5z!bkNb~P6b#mtc}L46(aWx- zU){G}w|yNUSgIZkKbY_;Q(1Jn6BX?bKPF{1=-@$i!eDa_B8l5CBhhe;bFgg|&*p!QicE$Da z;}6X=jO{!U(}M^*(c}|{Rk6JxVniQxd)BAPQXnXfkRBjw9e#FQ{aq#>`{C;6CF^nG z+zO?-zE6nL$)d{1>l|{8_pt?4T@N{rDR(9%?a>HL=ULT3jl+tXdD1%uXG}}8r~ub$ z+A_fE-yzxr0JJ5s=;fRbXGiPhVeA`hVDuiVk_tY+$R6^ni%KS3*38w*5vF!JL>#Ro z=cp`%8znkm{$AegwF%Fz6(%`aN~ZL@4Ykr})T-Rtw4i2^y-(*b4QA4fQj7Td)xolm zG$ZDq+&mU{Bt4vQCxdO+*|_r(qseXtXMeJ!VcPDn_RG}!dda_G4#E64LxC0QS$O0= zZa)r%Jw|85Tsn##!V$w~zF))Q9#Sj4QH?DzEGTWrhtbiedHA{axlr$kwWp7Gkbk{5 zX@H`IskkD{zfnM=9<2OI59Wi4=XqCQ})G@7A!y zNc;hOf3iGz*?tYv#=;=wXud=4fnZ91mEbXwK7~V@-)&E|`6S76+g5$;w@#)+)74{r z>Kg=cyLwld;W8%Xu%wGB^Ep#=MX`|@0{b|^j&Wv=$~Kvcu5nnZUCS(0%-Z#wi{(#% z3=BR2)$i=0IJiC`&^Ih8*x7Q>oDY8_@I76g)DKZ?71-rfS+VL9!M5K1{vp-5m69pK zA1eh-@AoywhMnS^PFKduQx_SW;}4|NOd2lN_ds!h3S4qzo2C}e2zJ3sx1WTEhe^k5 zXBYox>rdh)NndUc5#CQ1XVp0{j-!_e`0cyP{S}@k27xgsCvHonp6@qhF*vtRUb&Y# 
z+$hjKXwfurjyqjm07@orTkqW&*WY1dQ#c>Y#v?WAW6$b!)AZHkM_j)=)R&Uea!-~Q zA|!wPnj1kqjlNrE;)q)r?`(s)suIO7JMx`Pn&m9wkNut{t6R-W*_IaUsXe4*L9<-){Ee|4GF zM~{jl*%i9jK0AIT4K_618^6QLTm%57lHC-)%jW4MD~t3P!Ms{&tqZOi$7%duXaf)+ zNTT{aE3Pn-nOmqzG*CTRCWMGY?of27b!%@Bg{okk1Qq`-$wW|B4890A3+nc0g^5@` zg$jKJWpKvSC25{|ZTbfw5-3)jX8w01zYId_5_vA-)Zq}(p-@DJ2Wz9^DCK3_*=-ug zZRHZp8ch&#iXe*e%KgTKbu~(%S#EFCJEcT5l9t`{)QeTgq=R&o7_=y?5ikFK;SMlUrdjiTn~gFLzw{xh%Zi$2T`KTz@WmWGyEbf+R1^|gpE%WKMTc}(KpH0iqSmH0H}L|NeuUwAMZcRA9KxY zR0U%Y<5Hx`%K}?RJ)bR$))jN%ywn5KqBIemRmZDcx^sFb^M@|3E9vrugaf~}{H8Wv z%8g2b%Mn9Yg=+e1i7J|dCPQ^m;#U2~#$Y#;!iaeNb`qlJU?n2^@SZ?r9Pw^>yxuXM z^y4ftd5-v3(a*?^uC zR0@7Rs!;yaV>nKylwLA%CNGw6n|7Ic+1$!i1?;@b`11K@;x63?tjTTG^MRh%NYqM= z)iJwfl}UPC5k6VrjRpM*59=hde{7r@H8GR-pX-DG?~M(ecbqNu{vEF(VgC!=;iuAQ zo7D%^$cus z%2OYlGF3;5EGBc@t+_7W?NL;D`}cB2`*m0tAb!V|HvG%}tjCamHWV~*{Tg>r2bT~w zHaaB({@m=iru=Q$TANG*7vfY@0_#HCM>&@hsd$D~msaObcBD#j>utGuUjm)1lqD?Y zzQ%IbZ{!1BzURX+p-xA=c00{xFj3F{FYd&Tzaoq2DtS6&AHM;y-CO(;Zzx zfxhe4J-LArsA9nN|5@{pH?T`&N6Pb|ICs!7QUz0ulS3*|oqO5z&ol<$Cz1&kG@bJT zvwgMJi)sH?gbW6cC7+O>9#i3zB&H_y$R;q7B&ZRqmhBewYawP|N zdv!r$^C}b9I1!>G4&P?oFTM&SY`e~>@ISaJU^uCzKJ+F)*dZQVcxrDtY9BAp-t(kp zHYNsv$infUZDz)g+Yk;VoCXHN*++;K{~7cq*rTuBf;;_uM_d zD}~Dyig!&-jq!Sy-y{g?mfLGrFF-EO?ui@+rN7(B`rb-1QfnPv`KqOAK{sm`j5Vl0C&I3=9MNhmD3EgSWDIuE&N6pzGn7^W$dw6d~N zilA2GF13BUIeyEM7wRpO3B2z%J{WX?F$_FdJd1*o+e?L1R`a-wfCQl7kg7B5$FjPUzAfI*j8Kz~~S7<57X z1A~G$@>x<4s~Yi`b&Rz++MYFR1%n3Oa^na3&iw9Qi(ywA8yhtS-4M-JlQL{WP4-(3 z@HBK&7uqYcjrW&(BX8hE zRMu#X{i)5gh%LLM6m>nWEv|R1?Zp`!6a)`-dbaUHDe%kPMo4pDLjFwp$w(6k(VoEb zRs7Qy@$1AAom!b93DDgAE%WtL1o>uc1O-29fScU0E1TDM0XLWw?OMU7q@tW^#~6;)Y)nlqyFF4 za5PY**1S!kbOeZ}Uq3s&}G-z*Gm8fgEE34qTkKqP+kMJ{3%GyRn-Mw@#! z!c}l@_<-Ewb-O~9tm~4BVxnHIMX|rX|3*ohFEx~}hWz$V1Pk~MB8CsK8oqu->b_zQ z)T*~Ah6V+VfMCI;TGMD_6BFq#U)sX!^76=E4P-yMcf{C$lssvTAlS=-u$v9y;=4T3 z{6vA*cc>FH0;V!R|ILYHLm9LJ0Gme6V#(&y z>sBcB)=N!MWgu?LL#IJJu053(sorw-Rz*cTw7KOt}?nCb?DwpaCmgb0WI3R}ls6l`G4=T$d~ za}&-7A3gf!vleZ^Bgwih&r#KnE8eq_cL5uqv0`I5=<`I#IRFOT5=_mbp}w#aVgfzZ zjO+xvJVW=h%%Y#YG?vg|BPdUnu%zsIc+9Mh`NTD6AM{1Q?yhoeuyJtQ9^2!~PC$-)6d_)h-Z21273m8o!QDmxx5 zeA@59XVas0*Zx&jPj4)7|-k(LmHFw(D{IoJp%tW>Odvhv2yPPNKR)e)YpI0S>@Tp|hp4 zL%t(UoD~&$?TUfYmJ+c)KAK7A-DX7dWUg%8I6SP6`?oI_VFHnuka)IUKY}vJqSGOM zD_9;MF4USfV18TRaXtA7L)Y61ZsF4Ea(qSLC2L8-zSP6MXZ2F!#wa{1FV~`DH5h@M!mZJ@+^TQkF=Zc0Qi8 zzBGYE4-#7$U{?drk87`EE&+D0YzOwF!#>BN#VMAG@H=jr`OLmOwu)5XYwe4sgA8|G?s`!@yt`@G#g9?gV?EkgUL z-%r-AJ)BF!XS0!xTPCO99}Ov%G@~8ruN#(ZMovJgp?)rzoTP|`=KX@^$sAq|!jDU< z;N(x8ua5$9j@z*k5AIsJFEAJX=<{e}z`WT?FuGst^J7XdqT@4*M4_y@c5yBrVZ|x) zCXho(@qM2`%=Ka|$m8(0+Hdvay@FUwi)G8o_FaV#KMI)YF>^lw6iA`%+8R429E4Fz zpI4VafCCC+4NK8&fr6~;PZyO5hrSK(xe8sj6VS4nxHt>)k+DPl4EJqHK|r^1rSie! z3DhQQIkNvH?VM;heDR#VkM3ozPTi>7#-%(&t%biV&oVT0q0S zUG=OKAcn~wyGZr|!Qz$#Fu%uoa`r&`vK;|X3ON80CINXQkQT=9^37jBlW6hUHOa#X zBa-KuM8vn*Op+4gFq>)B$Vjq2-}$P$n15ehJLNl0LnG%EvR4S z8JHg2Y)Dfb70tVNk&X}|g+B}#QjiX&e5fy3f#hbue&QWq-AkzJN`uoA?4#|{ zMipd~(S*xmx@hKlgn6obsHL}QEY=mgkB=RM;glG$$8VDMXx^Kz6C6+NhFVANdzpLD zjjr3xds$PwDF^%X!Q9qpKrLSmaVXhaqqj81PmvN7bPX$nSd;nD3W7m;X!8VhiO|`Q73}s7#a0D4-V=yr`+({Mj-HV7# zI}PgvY5Qrop=gNjVvNp$!LyM&Hr!DI9LiL?7js)rT4dsRN<%AZ{URI@(NHx6ASx~2 z87yYwS&CM53_$e4%vSh9KWvxrdLRa9LTRhy;@*E*@L*gHE_%=FRwekZ7oRPwr-F%vCNV}!Vkcw`?hkHhRK8mKJ1qH`jrd#7IF_ud{_E4}nF&nDs&Y9beHV#LPc?ur5@=JoaJQpsC48SXU*BybM z!X;DmEgqh*Zj`{;;5qJkW)*tcuceFF{5*p`yYn8}vlH-e=I!5p+wP}r8%k8xjvu=< zG`VOffRznG5Pq~Uv>`U3ava%&mms_^D;7>iFFRh}DRXbZK|dmq2P~7AR5gr1%*Jov znU8aBA=%F|*sD-pH?Ry`iu+tz@XX5R9J|@T6%l)}SAXSPcEnK|jegk-frEO=6s}8! 
z6CrrFb&70x45(wsX3#)|VF~r+CdpGo4Fs%0Gy8=|K_Sreb%9`9KQu}~Q%sU4yLncO zq9Z6~-@Yzn#1})LFEn(gH(67$757A=elv+4+HxHQUrVvDM=uyQq%#!KEC~X0N}n%A z0F#D1GxZHhQtT{D=erEUvta!nEMTAj1@4{T7QxQEF9D<;BD+DoAJ2ooZw&x!)Vy2M zG&A2hm#_#Q(g^V#TG}NPQkO&|UOsWFtwb&V{QBHxyF@}H_XJ}oLQ@ileYj8ybQ5^U z57az~C599OFjNhhWo|0Mq9rdRZ^JM_d_~j;v+hM`c{`>PWHs1_@I4?71Dt%Qi6>Fg zzyo*`TKzj65gp-Bk}e2;hY9gE#C}wwWkuKr!=4|*;`m4JH$z$d0_;Pn5tDG{{7$3u z&xuHW{w&#y3<*GjYWqf+ebfOjYuA2IQcBM~hn`Eg|CMC%^C|rp2|d8A9bAnrR34M6 z$ux-|LjKu}r-}x|c_U&3E(CpsUThdKNlgDLnbAqX zO$jzulA^6|-%mRJ_FFgkHvlz9+*g=(Ity+|1VS|w494%>_Gc{&ddzocMi%cOHqT1y zwaLg*qD%s)(hq=;9B|n9+n*J`c4y`XsWZ!%LS71fnT6@q=JMNP_hrj5=t16J$42dj zIb@$0s65l7gP`XcwB5I24sUzMn-(g?gTV z!nxT-@!e>Q29QCl61HihY2C$w19QJTf_E)`{a!ybEbeWakuS#U53ib$4%WB_`zr#> z4=Un&`&t!2kAsO%@|`CedeBu8u|Sc*E0b)a81Yxk#dSUjqJNZ3aH=3=Bj>?*o)M!6 z0?H~Tc%K`tiaw>wY`*|L)wg>cKY#(L#jsnLSt!U6Y0w%h(hqMo4Pda2`YXVrEkLAQ zvzKYRwhayP-JqtY64WeQrqiSpw`AWZKH1bC0CY%EI2u{q-pT{J_DEFkVhJCsW2|Ap zZ@=^r%|3cJOZcJ*duvcIXCimM+ZXG?T@2}_hN3Az%`G3R4i1k@*7J~>GhcYX4Gohk95c#kS?p(}YpRnFBsyb8B-jMZ1E81{`!B26q|tcU@A~-4T>T7sSwBL- zk!W;G*qqjWI6>3Xh=4gvE3fUI7Q`%XV1!o_u#DcBL2&3$Y-o3UcbA?&xY=et^02&U z+D0NzV)HXD?J1M61t}OFhvkh2(^5}zB5+3S@zA^yyaH_^4g5&A6F8X4P5rI@oy1Rk z-7UdvvA?6`hD7oHGY!DEX?Sv)XU71AqivAnwes^_NdAxk&Kmg4T@HFboz~4DbvXmO zz=)#2=ur-N>DSdQ;TVual~8&ip$7YK^{u2(xYL}eD)sMU=NE$=W%+u#)(XHsE6`Z! zAckPrC4iM;%>oUr5y~jhlr)%8?L)5h$9XTmYGE@^41i&NMvX#chCT;Hp{m_*FdB^) zJ%~($$Q;YrmLdjWzB_~d$T8*CW+9o{cs!rit;g8c10jW>om29`_N@R`0qzSmW z&1et?-$dJ}My7gC8`J%(zpxJBe8L45|v6Mz2Kb@~GT=oN2+@q)2 zwFw?JcJ{qY+s`HUkZ<_Y2M73UJzAsPED=TdAK1Gzi5S*1`#l402WA&HhNjYIo<=u5a(=d@vJ|+HukmcCj9L*9ayfC1x7k1bT@Y}8`wl`q zV&E6D_l$EJ2g=@TUyE>y-sYn|kBi*BBoVoe`vxO?0{B@JEj^Q|?L)Ts>6fiKMObc} z;99a&dG~D6dBHA#W56jv+;9Zz|1vD^X4=;H2p-}8eCgjQR~FXwiGc%6xzfB!W1V1X z<7|YD9$5p|D2e=#k)FOvd|lmLL-35QdC645C$^E*PeK;82a82Ewx{yxuAbo7e@?JT zQ^HdbrT~kCp-jmPvCJ^V1lT$G!@<`_#sJy(&{6|D2G!a(5z<|e(c3&^2VQUQtn>xU zRI1B~?ujksd?l2EFu!?E(b&K%F@u$F8R8ddgPp+>)KOJwbu=blZo>z4; zwE6swn29ijJo$}JU*Px3uA%=Q&@~)Hji@SZYcj!qB7s=-OK={+-{TuU?C66gYQJYb z>#3I$xL8H}nLOvLw|^O4+va!6Y;en+0|GZh`7_82)-%-MWFz>YZwAsSD?eOBh&(D* zCWbgcC+@U|xTLrt(XXYW++J+@-gWWA?uy_AK5crr%G~H?)I_AQS2~GB)~>ou1+ZzP zYE*f;)baXBgfeU+ZWE-8d1k7ofp-1f?M?#M1$#Q^fPD0 z{D&g~^upOiehTG_xv+?O9!I)%A20Vlecf)`bNMELafZ>w<9|E2N}=PqmhOLI^B3hdBh?N-CR16J!LKgY@Of!1d0siAQ>|QOtjzgd4Z5-Q$JOw zZ%f;;2zTjPIukR>Ej`oxqL<~z;qzH*^UE!RXTgp(H`t^QOWU(ui8emiY0K`VthULm zjy^Jhv37gvf#E@>)q@%%gO;`j6Ci1HWBV*D9pvpXANy)kh=KWpiS3g8`213Hejl6v z%vS5e;nHM|Y+zj7$|BHzVA1%vQHz1U(iOqHeV@VF8<;C^>1m)>8vTzd6I6tTrHn1r z`StMTMS=Rvr*8E%)qOl-lAJF2)rbzET!c!_@KnV67IJM=@U!2ksrL@l?nvqK)5UHg z&dyFEegkuXQSd2F0Nt7QZsK&k{`T4Q3l zUw9_M{oX^D;RJimvba%3-K`9P1Hm!fFncu8EAu^x3>okSLYxFJZhd&e!GOyKviXOU z=)d?m;Y1LOUc{d&ytsp{NNH@K>OV)nXQ9qV#dqF|`JxQsAqrMw-ctN)Z>Up+!4UVO zJHgNmyd7b<4I>IOmH`I8eY$G+jR zPq!EbX;$+=()te$+mhC(H*P{zEthly2iyN}5C2s|2xo#+FeER55Ph)hhEF$3wmTOi zOmj({XFZ55;4JK}S6W|U#zkY)o93Gd5f8n+_uF=i%CY#9bsWOHtIjFtjemro-ZrA+ z%QQHpOOis}NRj+} z#Z=}NNv`a2im$Maw{TG&vrV`cZcApzp@AwB z&^#51mqs<;okB=qmX0xg1j^lUL&fe$+bsK5Nc&}-Z;P2AW0V5j|{ z+D<8`s7j#`NQZmjCsUz|GSaoS4<`8QFs*|X{c$_v75Ft9Y}Xho+b+V9h^Hi|iK|xF{W9di zJv-YUSD-A%UnP8|RQ<4}awwk4k|PF(_uYTj&)W+?!kd8OY^s{Zdj1MWEp4^kvs5J* z#FR-l+N{tfLYFWn=m|m7se(#Dou6@UvfhANr|t#2vpC98c?dfKY9bRS`q3 zPVus+Ka63v|C_973la`X2?ViaEZhA+UW8ZZ-3-_3LWtEk~L2Cb~bAKx3g1qfuEeoqLUbiR6PdDXLi&j05?q+);uLZl510?w%a>#tFl>M<%B8Yu=P zmrA74N(`|8%rb?u-P?oG%7frOruM72o6$hE#NX6Bf9__*VmLbs{r7x~=^pGLF28%a zjz*>6?lvr!qQq^Y8<7{E!-gItdH;p{wc76#o+@MARaU7DBQ@=h3H61N76;X$#hs6Q zYz!-6&~A`CU2Wg#h9$;23JMfNPo%05ChJC4p=fp5C;cuForI~ts{|@j_|Ds%{&&gF 
z@SE1;{RDgU70AdPcD;gj`m7cJ>YcPa7z$4?Dsjc(C`cmGq*t;nsR{AtK(N4r`oQ9c zowQu0^ri#=F^ha39Cu~IA|vxdI(oXWH#*9b!$U)3KrEdCh%BZO2}X*|5eab!*GIo& z`Kx$gA7tA~t~4I_&JEYdkWRjN9MG(MnQOaE{Hz*crPQET0xBrK0C}JhkSC^1=ddkM zJ&5rlo45aWg%!O6*>_;qbGmJZ67CAFu9MRHgfY_5t&RP{Zxzh^FLy@5e=Y}rp92MG zpGr3wI`@DHaB*pZVgX7}MJS1GW-`W7*JrD6ePLmL*)&$EiuM~V&1Q%Ei#_f-L9MpG z1m}pLd*;hxL(qaehdlQ_Im1F3Rld$BiGBvx73W*MHmLj{mSfdzE5=k9>`y^zSJ!CewwwljvXsQ^B;OwHmeof#xZBxU>_EF=s(5!|<%@~Dz zg@kH(DuNWwzYx$=5M}^Rl2;SE+~w1v@4jTt?tY?Ep;`p9=9HdbquL-Ylc{+G3@ejC zY=qzgnS*Xks#qKK96LLE)Qm#uzfZJ07y#Gg9#rglg#aN4lNEpzQ86YiMT_@VtNA_M z{I=a0j%QK26PVr3T!w{rwAZ4YYXMYBAQq+b?djNS-;(m*munsrMuRP5z`3e1>lKU* z=72wh0pMp!I4hwPqO-7n!c3DfGb;_n;8Wah1oQyGu}p&aMWxj57jgCuZxOM=M(1vl z{4Ve_#L*!`AO2U4M(Ow$9ZvYBUEf=9i1b5n32)1fvN<_R{p_s*nUEa=f zs?rrjhynrs4&p)L@jxC$<+q8 zs}<8ffEnx+a9VOe0>-&M_M=GZ>faIgAMy@+cc)qO21tR-04@0&vS$b*^KL`mH&YTD z!dgW~9eqP-)8s#04RHiO+M^X@iD4UPetgz(o)$0kB;)^jtDVVa`qlM(U2t*x|H;$y z5rBw_a8>Y4=K+w;{Rjm@1+1>xU-OXb7_aO7iU^%n$AVAR_3mGe<|+UzElq4tL$a12 z`1rBg?>Wj#l@Z0@2Z0v14h}3TakcK28qsCF6rlFqqo0~Nh}wnz*QM$u69p=Zkzw1< zCYRfTSHL72ArxyMbr4q9R%J=0&QB#UQh;IN%xmYGuf-J|9Ua8f9L`sz8>zeic_M_( zG>}+W3_cxy_tn}5E**X6wPK_`)52a5IiDlT)ox_J~MBO?=|ncHQ`ZkDdYO03^r9zk){MiqoQ zf%N_Kn=yJ$N5-H2|Hw)jG$tQ-HXY*WPR$2s^*?%4Y1Q25tcO@Bi1&*SpZ&=KQsNAM zZXoctZ3ZLG3El(Wk=f5s_#=@5pv1UZvdohOK%c!ib>&;cU?A9y0bSo}=C0rHI=Mq} zL{;ibfTU^uC*=Kx4nwGw%Y}7#LT)Z|GPNRbeSAPvAM66Jj&prFs6d(vAr`>@z%r@x z)o7>($V@*WmXb9)uHQ%L30GUe!^6*%DiXAvHm^Y8@G(GUzhA}&lvnxieCP}R_lpPU z#YCbt(56>mPPqJ#(+HrVQ42>tTlV!5&yIo22SBkHgge0AqM@R3pmFN_35gB<9>fUq z4n*=I5q*nl`ku??BhpMlJ9|}`osi2N~%{AFxCi=&D=XKmF zaQF}`UbF{*hl2X?JV=aZ^?d`Qb1l`9^$jf=Ep8FDk_TJ?z4scZbkwV~@`U`HvB7bu zS%JUH{&rt(1o7Wr1id0~eH-Nch8m_iSlRjRkNe0zY zwE$BTC)0yD$OT^hL*4tmpA#d0?)inTp*KGVcD@&78tjoB5wX-aQGfeUC08k(a z3IGSOL61PGNSedvHiHqxu7cW73DLMjO0qx&r7$SY%7C9uO`753KCqI)&!gIl)rmsH zCj;`t-waeDr~N9AV!Ycw$@Hh{DgJW-Y;i{oa0cuJp+52^-$&=$ z^?TrHs0@RJK#XJfz-e9V*84526K=5|o*ZTtVr#3mFj#kBvygZlCYpf$N*AZ4Mj_w} zdcDW&0?G1ifn-n@)qj+KK~bPYp_e0r-=nsc?B_B$6p}s~t0NnkroJ@HClNaTB`l%p z`S!bIo7X99(v$i;451+K+4D^m2p=&M2TG_L<^_1KAdqhOF&Is8y)K zuwZT;NEhpx2f@QT(ePGU9R9p!)Z=27_Gjjc={AR_z$b2}7=5z}cz^X^Is<%>5`dId zw>~@xxw?}uVT6BC?8A>Wo-M;o-T}B7Xc$boi0}yZp@{p1!y1W^H2AW6*fSH378C(8QD zW-1B}UhBZu{DxSa$qzf&xQ{!`UHsI}#*$9}v8+kkD+fbnP7YypWs}Yw+cY*KhP2dH z(g<|^i*HNm%BOle&AsgG8V)p)?(-Q}MjK@FTcJQrw4kE&TtS~P&OiD^&jTRkpF;t? zy_drODtI))AbOf&)`a2GMv$DB#Z^~VS2QsQ8uIhRYL- zQBhGzAgd{2fS+;0fedG(kd8}1w~yzs9kRozRI%dzUr$9dRfLmUry8S3k^t--XgFco z3h8|-01EctCV01_IaOs>T7nQ4BpudMR3+T%x0&h|T5~TFBh^qb)v@Xcs}tUn)i(7h z!F&BR`AYWta66iyPPkrpq;)qQwIIz8#O&w*{$BDKQ4CSZh_rKpFH4&@hT{LUs&Kg? zfWBr4rW(k#)T6OV-JvO1C&+KFr-lCk$wRlqIspdx3ivL@kLIh~q57>+60Ir6YRYOW7M8WrkFypn@Fce~^pzr1p7BOPi;7G(QEsMqaW zyj_;e(<>VBJ;w!rR)g98MCKLngZvvWjHT}D^51$Q-xf}Tmn_W4YUPI&g${=?@u^%e z$}<^RulYl&wjtf?^L;vy_Z_ixMcjW1b(}=|0Cl0i+1v%tqrtJ^(HN}ASRooZq+4$} zK~Q3eBG%TGp#I(R$CoT9;2V9m;GZ>qL_yEEu#D5cSz{_#9G43pET5UAJkqkqsm< zCpp8Yve)~Qvt7HaM2`T+IUj)d=%9fwh=n8nPqWE!1fx7j3<1)D;G^lL$-$uCSOzuZ z{SpuPq6oD4Px>%p!>yZOBQo*c+*HAi;B`nOR&>VLE*5tp_MFe+C|(dcRq)4{rB{82 zP!Q{GomC+4OXzBZ!W^hoW~#JVa@IY7wELljzy4ki^ZI{DHt83Tg9Nl91?i5eG*JoL zdQfxDPP!o!LH#jwkJ#5h!Lp~@<8XznMKlarI&cNS!5`*OFDsC$W)NdzQiSUPtv=CYY+~#+U{r78e*v zcj5uUmB#>6xLPHbG)CRRS#Y{iv9rYQqW}48Q)9sX$Ur~#vWg5Bo&%jm7+r|QR4Q;C z1fbXE*(3i2VXdKfvOzo*Y~2Trx>Y2cACz%h{i)5V)^Ah$)NFg`p@Cj`YT5DMUk1>@ z{6&sE0CX@pbS7RO5Ax)tq@>VFpppj_=lfi9#-7kOG>VM94d(=?GdABIfuiEhhn~UO zzmy`VQVLA?Nkn0Acz7J3Gbwbq-GGE?1b8E@GApf7vtIvGKGSN0a{o;(GIJCf+nkg^ zSXDD{UFXo`(sGzOoYZ@Y+Eci8_o5rg)Jvl-? 
zRhN{D^c$&=^UZKjj6qUn_sVPg2^W6z$ZDI|)!3tus{wrS#Rw04w8F@twvzhmE2Gye z8y?g<0Px$GEdIbMn9Ge!8)CsI&xTS^;Iv!T0Hi&zYc`d#$<)ppe)qC~{^#)RS(6w2 z`}}{eqY5MN7Z3j2laC4Fg*kPs@jJ-zKp-T@c3-wC;WoaHi1wqiF&l2rGe3{RPReOo z^K)-)=P(d=p3_7-WqD^r3o}s@oUpScOrvGv)RQ?am6o?XT@X^;J9Lm;wg`ql7G`eU zgfM^%zGCdAH0J;YqRJKf)5fpJMZ1Z(S!d7$TiN^hg&mOS>ESg)gji~?mV0)nQ zP-e{H_3y8O5PjdvL1%!3M<|`t@l60E5_^@n8N#2kt zAu*p|J>MV9++E0SVM&rO@!CPv?{`-^kv=cnMa}skm+5vTk4DwW9>EZPFvV%O+Yr=? z&-U8ST-J=PEB9|S!~i7X)F~ibYE91?_}`OKQi|)fhXsDmSBF0( zKlpNg-Hw~^<3e6R-8HXzy@C6dN7nEywL0U8+iLlhEmE1UaF=|J(-=t0uYmez6p^J< z#nS~)yrBTxfWstRSvL83&wF=NIgqXWHB`q^5C1FQXAh{%fCf4iC?tGOd}Pl^um+kI zAO}ZkrEFRNMRrRlX|q+T&j-$gYvkJ@x4f8|gVpLCW#SP!xxJ!>yZ1MF4R3}b(mV}< zusxj?l-5)QU07#Dyt$7i7t-6M>=qW*&%sWe;i3d%mBd?yEfb7PEX@|;R!g@INu5{H zy8NcaKLnnbA{A{7K96H9_>=yyKOOrF)Yc=(f!d5YfMd!NBvdJla;|_b3<_a}C9%~1 z4FTYHsYUAi+Ptuy=>-Jzacw|U;L+zrD6LBbXZHSzZzt7$4QDNul(p_OR;&II(TFZi zjx{KWnrk#>`TlZSI|MS3C#$)Id45*Ut0|)Qe4b;q`w|Oh20rH6N6ZyC#08D#o%hk+ zhD`KomD18}-fC1F*MU|2x3uFm*UoFT8{UC)_8;eNpG`*PlyB!rVm^(7zs+kKjTXfV z(LcBmk+mKqs_Q@#8^dXUBcN9vw4%Ih=AQdQbERr(t3gng{(1pAJtKg(C=1|~lcd{d zakzoJ&h)>=Q_u_0@86p){l&kS1zq5<(G7CK$OMvPL%mKztgZ1O20d-1w22>sGvzgy zs(Agq{3K9m@{S^~6Wu|Lsqwx%sDu|^ym141bu%GC*2I0&wap~V^QS$ z@y>?#etU(=>zYg3Jq~-KCQFQjfRIb2a$IPs^G4oC7M6QOy}z%l95owI22rE^xQq6# zbtlYb+WqW1J^RP&578;+(;ol5)x_#S>p@5SRG{FPF=M9?9TLxF#K4Bs-F3?v+IJzds>C6#1ei1k%f+J7Tz z@VkXgn%lO8kFTIw!hiPLGtMW(-^y}6fC^c;8MNeG&N~Z5K9hVb9XIby&YO->okWhR zlx&wqBumXDTRQt|Q_0VFobV}E|LtNCkJm(nrS16beGKSRNfs7L7{R5(l93B2@HN}e zC1xargSU?p;Gh6TDPM4*&}X(k+*fHQAF(BNqYIC@{HfBiOSl)pW4RZ22%w#@v}x$; zW}sNvt0Bf`xYf&HCP?dKiDZJ2(}?~0H=>SkQu88RFdWd^Ag=QEgTn<6;%0YzK7R_0 zKPT$t5X-t;bU$5UdA_$Cxife+JD&Qul$=KP%)!c}wt5Yh{DBP}@NSKv9Fp!B#v!|O zQazi$=!%!w!E8Olf5i^_0)xR50k+QD$pUm(*a5Wcp1VVix$8#l5y;EtIOqnjI$zoT zH)q|Y&fnQzRqdpw2Pe8|7nL$ID^`jP$_QRL1#DIS_7>AZdRdA)k^?`l_3hOxOZ&8M zw=2BriON`_5hqJs+G#a9RiDfKU-=s7ys~UvW-p&e)->-<6qEDHrjwiPKFMS)OlqWP zxD*~t-)8N$c*z%QO?nxy`@g`a3rCEx)oDuDZp@$4AFumrOO_{c;~BZ(tha774~tlavep9MUAJX z48D@aWCoY?3g>2qJ;vTulkK|9S@(|Iqv;{Nm%|BE9_bCznOpw(6M<1nNoZX}(%i13 z+B7`fj@n{lFBn?(R*l2M#n#ob%@jd?)`I0_*k0Ypb3oH?STlnwX5DU!D#n!rat5x` z#N8A>jKcxO0X19K)?EGys>3PkR*yVovr~_QTN^3)V;HBIQYS^Lwa&PYHF=x3i2?Jf ziGt>91xmedO#RdgEIls`KWvxOK+OTK!;YO6LcM5NX$^z_1cC+fU=`pdy z5;F)ZJu7b842+IiZjdqrysy5?UpKOJvz+GK!n+B&zu!7P11f&b2}dLf?o#f zez#=0^`2o!JtxdGtKY^qcb-RFSYDOw+wXk6_P?)CuIaX(5#9zDpDj~FHrb5**?2n{ zYf~*Oq|n?fInbFvf!UD=&Dz$U^rDTam=~RKf7?nza(!i4X4$01Vr{|zugw-0)XBO$ z>z(=GxQCM|;FD9mltc~BSy*Bo@@=Nh#?MNH+i=^N(RaRpN7Jo+o%b@LBA4e(Rz6)( z&ttS*k$*3XcQol~=Yy3Yw-k(<13loI;qsW%h%vd1nGLc<9SI4vTU;rWMAYr{{QkpX z!?3>MwjWjaN zwa^^qdpBsNdssSaz-n9nsUg>UOAz1dkr!hpTM#gjk>0C{&rMZ2)f@lIEu2b3*O6V) zZ<)w3lVQA!dCVLfs%#zl2ojy|K z;75+Se(jMdsV= z^}?r~IC%U^I& z@SPzT1>u!+s2tXq{d|bF^owaKzx17I92lV14xV@JIn0!}_;~$|{IgpHFKj)Epm^l#)KAx5B6QN9q}J?>zBaC( znjNhS__=IXD;Jn_vW#oQ9?VwFa@0K6G!Nty)1|P3)-U}s880Lrm8<&X3wSJXnW+OF zca_f$5Lrw?@_B*l2@Y=#tTM7C{$Lnp!zN9kj7|-%60@=1qHl6#3$d##Z}z9(FlBV( zF5g~x)%n-l3<*=nuo4mL70k^ar~22vwNWEulsLUE&m52J1GXi3SL^pxdmT&1=8I0@ z^5JQ2nj>B-g<*iNga8z2=|!CD*{beZLH{z!I2IRCi%-FWcvu;t0J18CXWhm3!25FA zbG7=%YT|zovixcgApoIEpx^tu4`hm#TL+BZinZupQ1gGit8NBR&r|G)lwyiQLm@R> zM!L=3%*1LA$Zu>p)FLel`oJ* z(5Cl#on1#3A<=v#%}4vG&-;aAfsRa~cPSL{uBS&qVbapc=)d zn8tMyW5kT3T}Ai(=_*`FD4%!`Q2eRhpO*%)UV;}T>}qT@)Y&A~8yLy#0zH)!K&@|` zhfX5bw;tqr@br3|mp0xZT%CxS6UcYny06ASCdIDb*&YUK(uoMu2$%D#EH9Z@i5{iZ z67msyk1*xtY*lQzsgk8<%825=i$j|ESOyowozX%TO?ny@$NtN#@7J z%`R`7SaCyJ!~)l%KLoDkj*||dv_h{}@5f`jTKBTWs#6zgz8yX06d7trV>(TtLoq6{ zv2|DYp{dQg3{2&X>gAM=CS4G6?_KY+gzR4#_5X$!w*G*rNqz&lv@ 
zf_(C4t!CvDqawkdbRo#KIiUkq)8Q!tZ<5>cXBSJV#?7dOcdOF2$q0&VK0d;3t~MQc z=U>08l`c|vfR?itV0nuwhlzmxkSCb?zE9+{cgun^@Smok6pSns>LTxaRl!pL z<7x)86_TYThp%iho9^mF%fHLv9xUYhtp6v^{NYLE)qt%QAH&p-pqiPH#C1tBaS}pd{)q&pyH{yCXFSpAJA+A`AC%0@WqqX#lWWuWDI;y{rP$qfljg8FwV@dpoLHoA6*qX z`gO2Q&NswxYHJ`bo2(>S>Z|wE^WVnvHHg_lFH_wXA$2ahRmvK@5}6da%>4<2!ifp$ zgUtfVdz5yOy|LlE_rHW@^MY0I(l_j52l)ty0%^_tWd~eQwBl>HCrV_E2mGE zDb&x@$D`Hnsf1klEolY00<{?R!+HMVC`U`WN<#kiw!d78485*7Zox9h4l5K59#=|B zWU{@n25xx=0&14M{pXt%8{GR~lYU)EM8-Ls%`jK4^>QF`Hh)=k7TE>nGCvezz`ge9 zAfJG-fh7R;YmjAHaCbJUV4yY0=w(#cYspG+!gVEzhoEvQ_FL=f>h7rT^(cnD`y zG{nRo>Du&YPz{3lLDhGE_6vS=1)6SGKm%;-ATNNKrSl^Ay83AVQ<=vhVOr?9no*}- zWTDpJc;9frMxeIg>ra_JjXpD{>Op`ep#WJ`z8na@JAEPnag5bE-+vB<)Ag$}KYl&j zC(-u5QSKevA$}1Sg7F1z39Tj4+|1~77W6pp)|gV9HLFQ?Ic&o^AV*~fm_-lfsII!0 zD;>j=R-nhlpOwQ<*V%ygd4AM1rDBo#I-pZAN1jqP*?kPl_FmxrJKis`@*$_`pte%$ z^r}jitxETBdSW#O9Q3hz#SQ6-fyjuZF|5IXp)a?b=*TUq24$FaqB~}mJt<|D*onDi zT;;W^Zr~wp*MvkTu2#YP{1NH(eS4d{3XzO6MpZM_GJ+13hMxt43vDiQT!Tx{rftH}$zjpp-}nrEE^ z{lbT7P>jUPbE@Wi_p2^@fD^&pSgAQPvd%YoZHFWuqgR7vPUR#D)Wg41frZEnK2Bu< zFWs{l@5Ej@0k#F>z^Vv`OhKRKA9gDX zlhD+Di}!3>e^}8VCkS33R!D}dKv)9cPs#T++#kY`aRhK+Ju8+6%-CHIUdc=lVhA$a zP3~ptfeLm%qY7l8F;gPH2ilGScqEKM4>M=Bb*PsqCFSNC)+dfhKe4*ln@@Ly&G!c! znD!3$c87tN`1{eaAhF7Fn7Gtasj+Doqwwyc9sVvFHiSc=nsZ|l%c(KxP&Yq(h%WZ; zpTfw=SIazI=?sJUoC__V84rht)&)zhI=-WoM?@jc_kj!H0PXQ8P$vGP3vwZN>i5Vx zqj)C1b2OvaPty2=(Iiq1Bg&Bb3QOGg)!cC=J7XT&C{$x>qu-y#uC(H0KYUb!iS10$ zE&5{efms1Hp{bs^=fzdUt_mB6tW6g%zWbS8;*{BfSy|&Uv18#uqUYza)B0pg1$y@ubdc` z$+{*#)VK)rnHJGGSLR%Fvn-~eFBt952s(aY1f*{`wX_?P7UA$(SjcRPXt zA)^H#Yp_{uQwZ``g+PIJ(==azRi&%?YP?8FGyCglJu)7v2nhNFnF*~)O#5UF+|z!3 zD>kgV48q8B{?3?rmu)+}R9)$pdxTBzh@t~^g$r>XV~`@al8PF1AF67+wRvC0t`k7t zuMUQjX4BW@kL*kBxXE$jw#%T7Ayiq(O7L1ER$Gd2=Y_J!E?daHF5dZ(%}=Ok`( zxY-7kQtHuLeSA8GjcRTyP>%4c@J9=OU*=7vbdbZiUORWfL`s^F| zPhkJxouRP}GWi7f|7;5gbOV#=xQd_l*cC!3fe9jg5`8f%bt6sncroZKtjJHM09Bh! 
ze^=A8P7knR>A?ONK57JN#1(oxK7o#(7pm_5#akiq;3SYjFk_IALKK?BI!JQ;)K(`y z7MuKlxnYPH*4RfDF=wtCEeko#Z8U6NdhWcXt@Yf*XQ&O_yX<$R zKWGLrBf_M_8xR4x88+G(y*Cg&*hydIH>m2YVEkJ=i%V`RN{)Qnmhc-KXRfa-Yj~{O zd!auVb`*_ALq)-yWElECnjci4b|7C&ZtD>BMqU2CcQIF=QsT8&q0y{RpC7kYle&=5 zt+y+(&1!k2Bk>m=E|?lKB)a&UUhrhnn?a}1P_GJdAIVumXpXw6n!y;}Q&qpJ9eWus zLYe19&N8*KBot?Tr>3SxkoG6^yZO!Tur2@^pzVcXDkX5)Ew@(cWG$w?nfA;~{TV=1 z=$~n3I0omc1!kVEN=(N2DthMtyu@Gq3Pcq5X6<5dv}st-;rSw^Ktpz08|l_-OXyQu z;SjrT|H3|BX{oz8{xeG%_{$^el);Drh3jD3`~bqaF;eLRIa9k4{Z^v2`e30J6sLn{|J^CT^m|xS0_Ymb zbl8rI$BEVuW;8qE>ws?6Zr4WI=pN0ThW2gZ6m#{DP;tqWtOnz2sU;=6Ini8Shu|Nw zvI)fAwDt1^@C8f+*3-dd-?k%@b(hn*oz7C2})57C-_&7yCgvia!afNCHY+{v$*4jPy|U9N ze1Azj%wRrI;A|XO-CorF=lO%f2Cf-4eW@JYRD=&)tYxm3RK^d$=8FNqCz!uIgh#57 zQ!$b&eY$zcGGjicINtB3%VZcXXHtiZjId{PE5w|ak|h%BbqUoe6qBeB#CM53=k3+1(R~lGtX6l$c#VIh{q1*6_K|A^8Hr_ zOG8jDkatPLBmteKCu1E*Y4>l=&;c(x!wG*ky-@G(TWJG;@!ReQMZ9aX@+L*SpY2+e zoPaJq#CVt~7GVi!h*SFlw4|N6nz>gULx37^>6Svzva;dCe;stHa{@>YF&fa#_?Wzi zg1LJjnZ!X8oF?SdV7-HnS>4tx1y=B!gL}dN_aU|J@&4F?B7^z!LTZCz0I(qC3Jz{Z z=T64#9yh3OOpYX3%ft$*yXA#3n|ZXIR9ePU!6s7v1F5Q0Y>BF`jPXipPMnI77PuVc&psP z>M@)qos!DYED7Sw1M<`~<+3-9j@{&rzTiaq-l0YmLeHfI?=2)gXpJ>cV!P{p-Nh1E zGPb!0Yw8*^y2K<#M~ompyuS4S!Py9~^+P2JUcR&hFfj#S!-%-du7-}PWkYtnupEv4 z;9Pk+dMQkQuHuPwn5!_k)Ipq?%ho&Kg~Yp33rpgw!rcD15Q3#Ag_70&F?~Y$_i$i* zkRLoK1P0ZNnoU5(wJ;$4_)P?wV63%Cdy>*8q5twkNx2xzGRpS8+&SpFo+7y-ww^L- zVI}iIfHKXm?he|xS-*fp0gz{tGOb__Y80yvkilNmuCouYqotbsOPgRUYA-L;iDfOKTdg)iySpzTHVf zi>*^Gwk!wQU-L!zK?&&*^sbrRMx%Y1JmyJCI-z#3`=G5?9}>u~;B^6B1d@PGC>_|L z74Xw!*HcOS6Z&Eqlaf_kHce;xD1-v(E*tc5p^t<52^P*yqJna8^ z!RUXJGfOJ{bHL*eBhJ*gR}rvQmQ+S07wCNuqwCd{bnh;xSLud$TU~Mx@LKD3(KjqY zi&yV2uTy+x>yAXgrQrX#hl{)RWI=0%jWe|Ek^*&rmI&HXM9IN1TY>KU6%Dp^`-Pl~ z%c$KzN9?WRGaFd#Bbx{5UdMJ610iOZB37KFap8k~Oxon+AhrG^go?fY5Yp$4?8L;a z)l$>(39`OyZD``^cg;rnhrvtGL4GsowJfGD5Ec08UN*ZHo+(B@2!AR<%BH5FYCV57 z*Aew(n7rI-pJap^P4GR*`A`|V32z7Y{P!} z&Ec^uL$Pw11xCdL66^PlUp%^0}Jp@xMgGbIu&MNJ(9PW3;+<+r=-uD|*v%XH${O`>W|W|v}Z_jfjXwLR~O#N7uLx*ZlxmE7}*UX7Nc!uDOCYTI^^yj}|U z&H7fot|rq-#)-HMnR7aBXYMTzJ9Oz&>21sAT_V>jB_VawQ)!~L%)IB<?I1T@+s+A|#;oexQt{|;6(_&#OU zyULz8y~}ymJzrfpYZcdf>V2o4Z67@y*loibh?5?_+8c-Vcq9BcX%Sen;$r|wUs3JP z1$t>Tl#x#CKdK6jw@#--K}W-9axfW}0@Ug0RJ$?vg!}8=JTya!l$mJJ8!J1ofTweX z=mpTeAh_q+^`TPLZ2!aq?<+_;adja^keZW;uD60zQ3XVXsg3Bge1fg9pV+MydcEeeLk8 zvk$%O8}40aHFnW+c5W~ph}Tv`p%_M&0AN-MEqRU>T|!%J6oCs7(MeS74L$p|LG0n7 z@2RntBx3!6TX76WvxE$n0At!Q$pAEJd49}U+tVJK&f#jm{ew0G1S7D85;(aNLPAyk zer>>h*vJ`_VA4ZCc1u)}*SDvCjNJ^({Kn^2xzg7KP(*8F;R=77`J2-z??#oIzwG3k z0}!c)!kk0Tt5&Z>DtD0CDcDwePR^-&i<5dh76N|^pgZZWplz~WTRvUHSzwNvRAHv{ zcvi~n@J%k}tf@nq&UKdwLYB&Yc#=JvFQHIQYNF4Pvzb?V%lO#WMQ}fk%I?%@Y07T7 z;5w{xMMh1nrNwMSf8O&bEUU49xKbyS_{F@|k&?}bN)cexlTy;GX4sojO@Ao~z3Q7I z9G5d`7B7qJRU*q8G!0ima&BJ1zd`y_kXy^QFPN-HDb^~XNcVxiT9QCY8VA1p8F(7**Hp|Ot?-c@I$9%dSBhSPGdQndYOOfu% zSV8)!S^r@PXs(J|)u%rouUMcMa<&PqNLl`djyK!`da!zj1E*y)MM>)6Gp`v`0=D!~ z-1QHp^UmQQaoS$P(CXV8A_}yE=r@GlE>S)9aWMG%A$h83NtLe-eo1At014eHQ!yGI zph|FC`jmC51!G5JJm1c!n_dIUN;61D)}|M+Mm$2n;!$>UC@f>&zGI@>^tocy`E>B= z*+F=G2>IAxQA+XK*ms&->K%*Pv^Pjteyda}0Nw;FhHd_IH~I>jN9wy=3o4z)vlb_p zDX5z_QYQ$cle|WA z(c7xCukiQ~jQ~5*;CcCZScdFXBsI|CuIorn=aN@Fpo&3_)V)D>)r7e(%&)FIDS=x8 zQjKnv8#0BAlXHH}1&qqrU1NcI06d0nDZAt>)8mCs_V(CXr%s7k4WOExkpQ)Ct>@aHZ44PAxady{0JedBycJF=WXP26?HHi9PGGU2P!GCdi zi)lU+`aPHeVKc0ZB-P8sV(csO68l;DhDLiS^IVIW?6cCECVlK*^E6{oqA8HU~ z)*P8X>tR`1yRptmAqS&6BJU-+>VwN&-@>(?%x*0 zJLJ>TzAGhRV`9=7wBM-^RBY_!_88F}|EW!MbhB2~2oNL3u3CRPs<=j&8h`-y^>`S4 z6E62C_=$iAF1GSi?_k=y&y4cRLcz#wUbEx$C|u3wm}XIe2&(u+UAtE9xE zeher(TqRq&b=Qu!(x*Y!IBHmgDTg+y^o3sN-FdtZClas#_+4SHn8{wUS?sJQFvisA 
ztVEk&^#EvcbDS8M160|L*Sk8#V_CV{(p~OSzv&wrTy_uE{?&8n+QTWQh~p{DuT5<$ z*US9N4eY0_^WftZe@3<05d~C&8xWM@tC#m48l385jy@vYG_n{QccO zS(n$Iiv( zG4TOvU)N?NRwkb1RzL}v(+E2QH_*R6!#M4bb^w(A*$3UQ#GbvWbgZb|1(gFPsh;=G ze|p}iWWGE0zw{_u%4pS&)V~o?DibMV(G2!6#0Kk=F|foU>xaf6zw)hMd-w6=vFT7h_BQ6 zD7vezBp?B@y;Uj{UsM);ojX+$KUoRZz`i9@9)oX;>evU2lI1`her?0&uNvjRE^K>g zBL3Q*mpzslZ7QY%qb3n`y#Mn8@GPia&Lvm;)^dYaWK4;5)oD8HK1{nrbG4bC-m+%c z#&F(zxYZie%b!6B8IAbnJf9c_ehyBl`?L|%b)AdwTyJ9luYc}sfVmGC$9!hJVuM4mJdbYw5J#hw{WpX5MAW1ZY-QCM^({|~&G>XU71Aeh z7ucM>UJ*3Qg4`1(b!YSh^s4KpO@UgjQWvwaeraIhb9!bb9mw1{+wmv7{nZ;VH#$5i zk16=9x~H9kBQZ8zXex}91)#*ol(v7rnWA#}K3qW1WzDzHXZ?)85ePBi2W&7r!_~)z zP<;t%d2P(jR@<(s7OY;M_She8_)FT_iSK)Nr6vEsbYj7Tb+L@|U(qu9n|Q%?&l1zt z#lFc<$Lm(5Xr(Ny@MH0?D$w3683|jRSaN8MbhmCUK9_7txU_@!z6R{O+c3D zcH|_pm}ntxrsp}nk{exVW1M%8OX9+ZL_`0JLMq15lblFC`Um8Wu3?b2JZyGR+z=1{ zado49!uB>yX=Beuh|4kx_w(Xa;eL1y{{4^S`0>}S@Q2%tJhsiMJI$R1RDIW_OW&PX z?%i)Twdl4`Dr_KC-O6MOJ!>Wb6m#=kZDFnC-gu@8p>^~QjBO8T?liywK?L*G`(tZ1 zCzVEbfyD{gn(U5=jheKA+!FXn!auV4gRuPFP5XDVIw22%i9?7W$exU1-Vm}b zwwwrL*bE&Ck51o6l3wiZznEm&JH#LJaRT=`U`S?J3_seS+g{2!bF(NUcxVO6R}EUbsV@*a6AgInL2S)!MWHRdFHl(uAOP1 zRU()ro4N}TwCBtc{6?BuTJ4$BPbEk|uz({f`a1lX)z~wnO>+Ku&11vOU45`glx_4W z4>Ywd@58&mudrVCRQMH$p3++R3bdDVqQK8DZ~g${p@X)upBK5FTy3!32YOz;>od}~ zCcyGa=s1G<*uYEUdwjTBb+44N!r2FaTqI14_vQ>c>w#>Ni#fi)R8mPXPSO)_xSraF^7D(pSeZwv%`L%v5lXQf z=jUPMFFq;t2Zj!DBF2}){R_aRi>;}x-Y$M)FWgim$9BzI+r8bWj|jJNgw#lQti8M$ zB2>t%!Mo8$`{OK)>}I={UD-Qu^AL2L2{cMHj*{IjH1uxfGS#6a`O%W)_r18nge-o` zR3nM7E@`-2bW{qo!aS!jlS^Z%412)0irgzx`o&$BBCTjgws2+L6f5jDX1%Ri1a z75q`$PFw5KyIP^K_J=nEaF{Ov%fxh)!^E2rP}sc1>321xj=XEFOS4&G9wjk;HKO7`D4s%o-aPL6k{q-5JPSoXbUl_cL8i z3BUW6)m#*M`$}H@{by$NOakLe037`Oknf_{$P~@`^y1cTHnzq=#vFL3p5V(xzmhfy zr-(l6HA=nfsyN-d3XFRS?xC0bBJZXy8jBPdeK@eYGJJd%4`p4it*!3Nn5TiEHDs}I z&Ejagd(Qm0xbsH4qc$%hjh&$L@CCI+{q?Q6<5V?!>ax$odcB0mP9G z(H(v?O0q#myH-RC4$IWY$37F}Rw-PH7XwOsxyaySBdGgxq`$hs3b8(8&@m9Bt3rnb zBHifV>*G|llxTmeVn~;QN-oaZsP>`6CbW+Sdy7tB1&iyWn&<~)@#lCJa{`Gs%PpJk zD0EwaB1ovGc4x>a>Z`PuU*$w-pNjvmbaAn?pF&dTF7fmnC5WRW0{y_DCI8<6>gwtS zDjE-Afo3wWDKfpdIBr>M8!mzh?bUCU+b1WV?Ib-0)$k)Ceb1Iz$YUU zV{ukY@fPFO3=i>6CsC)m6!2`kL+t+ce8}jD* zB2=X3=&*a4nZK^QEZ!Ocz$QrPuXGE^T>}ug3bD(6PI2O4!zG?5OGfqKd`%h0Q{!$?P}12LQ7Dx8vA!d!5@o0rrntYbe|pSr!+I?BDGMQQe)#AocfR4; z7q{triOC^bs{|cweEOqPp|F?mALJDMRHm;mZF|end$q86Nu`q?WEL!Lk#O(}kkZ;E z{wO$lph!rBb_ie%o>dah10s^zNjY&w)zpjnr=7>Ieb zi)KpcR{J6f?_(7v6$mT2R!34ZnRxY6hBVBi2%;j+PkcsPk3$+8)}GQjL|1>vNa%UD zaQ`S{wKg_76dSIp=JW$Gwc8Inrnu+)fG-}4^_%x+HTQ{nkAST5xLCnfi!8BMb`qG! z9Z_OVMOED7F1{G!hnUg1zcSEHcRp3LI*xC;rTKmw90?qVbAJyb$O=7hKJ zY{4NUTyyo}X5B6KI8!}o#E_dg->1I~F9q|2Y%gN$^OH=M3|EoN4vQ!b0PT_a0j*i~ z#*u4N_JiSpIBReP4B^1*%z9zbyu1)qrx(XrMea6-l1E66uXbpNg6=G>H5N{&FgO@v zT8rc)gw5^31Z?I!D2#RJ)}!!j7`VFf3k;YuJiJQH9XXB^laQuZ{>sZ}(G1$vDFH%` z9l#deKkQaN>&8SRkAgbr8f2(68cP)u_nBsZ4YJJNWSBD9Zc#Pq09+V8GS2H$vsr=b z`I2^tj;vgUm!%ALuAZs`oj8+jdM1^V_g4+I-~)?|BOQ(E?R9nTeiJ8?SRPUO@Z$h3 z=v>cj_?F}NSon@^DUna@Xhr`YP3QcU=lk&ST&q^CmTlW^)w134vTZNh#p`&-%RFo&4s=$8|!u@{ct)J)fX*4=rTf zZXpYJaBlicPCvTQo#kZt4RrNHx@y)|`Nzae8?)KrC!{#GhBw?P&7$A8E&K=rVD# zf#3bN$BzVLYLN5n|EstC51XgceBb;nUrIH0Y=YE&x*lVewF`3CA&yMz=3E;yoBx=1i*t^YA5a8tGw0LGQ zXr+=G7gr@}k6lNXRXapPMaK6xOO;k7HZL3?M9kclR8SL>HXq3^UL zrXfyo{5S{v@p$%7lmx`Lb#o? 
z^{iD^cTyf(Eji64`qTe;hB_d%*J^!BSD7^vjx*^G)$6HatlXG&-nuA9L^SZUC&l`{}zi)IkxLi&{+~S3++j7f1SX@Hc<;taG4- z4OXb?Th~{=+*&oi>_&;kgUtc>MoWqz6brhW;{mT=H?`)zknNDm4Cb_0t^&lOdI2(> zMo_<%Y^)4%)A0CL>)qF;A{l4OPIxz426$IHstZ@apsQ}53Wir#MP!&pcA{sJXXQij zAF-pKVdx}L)4dpJ|DY@}>aH(TBF`lAmhFU%jG2P-V!*XV*)FzeWb*hkNv$=1EA{>& zQ^ZFogGnFCt5_M(wfcXjmZ&H`1bS)CZvkPjjep#;y4g#KZChZ#tRjV#X>U40dy?@ zp>a~71WW3n>J}6gwX%u`4caJk{)_O%R1wHl`xn!BP*rAF{0!*>T|$&!(_=LB{`F)X zb~!ciKl}VBJP^^mAo`c#7azb|%HC(1aa7>l{p{W=h=IyvqHW>eJ8nmS^a&&R1w_7O zC7N*?D!ep6@^5Wz5vs>|qeBqBtIp$TeO5<`zsZUuzcZb@Y|b{Zes4skVHck{1%lKQ zuxjf#mj{N`pCUiv9-B;#mpUC~V3){CHiI6;+wAEz8B&TS1kI>hzKE$4s@xQW#;i8& z%3`-zN4#1XloWp6wPJjdprqk1qa;`i3>%ghK<&jGq8i5} zfQQ(O+3cF&cB70LFSljeYYMqs?8gK%&?>;)*_|#SgksMM7JNkO=3N%xPwh}A6&0l8 zl8a6XmA)y^^CHL1?vUSO&}B5RS|X0oqoXERQF(NgO`uVSzaBl4Exrv^>|E4^Qib@0 zxE9Ys_IHzL2(g~gR$L27*!K0?!$#Oct%q;ywcWfH6K_onXm3_Mi|b^unzolKA0lVMw`dn=;kZH`2SVhcJS4Jmw)n@HdsLL4IiraFK|} z>WrEK^m>?=B4H4BqsNd0ITz0}a(5IY`jQ9*+nKuvZsU?k79iAm;oR&fx~8bi&I2@KZnQ%%|K`h)0dV_yq}r6k_RiO6u)^SetroR-W`lFKgs)&^IYjpXf$vWAPmB z9ey{Za{l%(a3xQk(xj~O&EprOQ)aSn zz;W&tY-{Sds)w*C@3jXvGJfbVdFgqKTjw5aV!kgu{fV=%8`cbUJhc&mu=PuRS-Fn| zgA)aXYG)kT?Ezjq*pe99v=^eE)4^fBh|O8MXvxZiuBOuJ`*^7OxtEQknEA$}}s}2BP-b z?DbmjoaU9@lm_{}&8dIe%NXNo=0lr5`fSCANjMq)^|D8dGAxN~;1wv_76SI!WGv$H z=YVQ$Ue8MqQS$S`0WP@7Ir8}?SRV}fDmLV2H>U1R@?hJSt&0Br2Nz;AbZ)%K=4_Sq z)1K4f2)y(*+|6!m8ue(JG4`mNngD0w?mnW{fIrAs+kxQ{FrfJ?$x$RA$kCKN$3_;9 za?KgEsNG{9<*~{q5@Z!mNW*R{nJ*mwT5Y$idX7=#HlTW-V*c3PsD=$iKHR7s#`06%*2B3kaxyhsu9tbD&8qV9CxbmJ-sJ1U;*v|l z?dt>ICaE$m!kxV~A~&R+_sGF1zgtSibn&osaZIw&P!7q}{&-3ssulgkQNww^>g1fg zztqpypdBAqfk2Kiw{fwNwnk7>%&S?Zc!+J9tfCN0GL1SGT%R9--1GX|i@_J%e)20*&Y^>mf-5nBsCWJF9K>X7FViR)1m~v@XOq!oy5m+aVc!(_M|Wn2QWFlu7z*6_Cl!>UZG_D^iun&!teiC7hk&wX|D4Mjom&J)pmQ zX$x{ijYvjK34=WD8&~9k^#qig)uMpEg1dU?+24L$M(~lM4{vpkO7d9Hsi7(-akZ8; z-@m%Q`|**3A|YkB+H)+*BEXnyj$|HSek`zR?2%znA?AN~1M|pk0~4bTNOVNIh6O%A zGbD%A5NWpB@6hQuYI`@p?KmgWprmq`6h_+x9)+k#ff}w9y9=SIMx^!4?@M9hSgMrh z_r!kMgo5iLHoe@m=FL%YJ?AUonWyhX99Ancg`ep9M3$uTv6%GellH0-+9bWe3}v#e zy-g_}_R$xVMUmyQxF&GdjfqYFEnrNI{*;H~o4+U~oc${J?<**bD`kz=k3MpN%05oG ztk4!(`=z*E*1CX;pY`&{Ty~oC5naGxbUFzi!DGO^4ZSq0}PX zOKoHq+r_6w5YIvOdsOX;Y{Huq@L{BLLX-oT`mV~eFjDaR3@clGB^kId`mT;lKj0L@ zPzVuMPB6mUePre3U3c>c7N*`ox_0b{rOAZ1avsG0bub9w=)N{gMGlHXr|Ut3e5HOm zbbKViGcR6rbFTi$My{{(2e=Z&#RXc0LLUFjW_A%h-@EL&t22q>BSRRYb`hgxLRv*p zr`~#|fe`d?Lk>(WwX}M}C?*=bW>G9`V4wK}TW!&!Ypw~4@zn#20E-~GNV5$uJnnW& zj57_bk){MLEoaf7T9NqaI;TBZnV19kEDz$R^qQkb>h!xoii5*?>y;^wUVGEwq^mi1 z{kAd?7)`YlaG5E_ASbn123}GH38G~&wi(R3E_IcR2F|GzQ+wN;Q5jEZsX+R1l?`ui zbGc+e=Z5+gxXxnYPksrlqn*IaB~m)go(a;>9SCX^0?44Tvext(b#SkEBnjMZVP7uy z67X6Y@(+pk;M65+WslOZdm8xz~b%xJ)R-5$f{&XU(C4URs;u+ z1J%rC#s;xK+HpBbPx!l^iT&C?s~^f|~w0dm7dGnUoHc*ZLgs2Xlh-+ned`)9VxnUzN+W-$p! 
z+pgg89Ohvc)ynR%+95j@7_IU*#!tgFqz$cQF{$oX%(|6iav}=+4Xxh=TxRNbNNJJ= zPLS?{){?(-fdLdS_M8k5Pui}gJeW@5VD~-(=~!}U9(?TrNg$71)Gf!F3(4Vy5EMEE zY~t=uhV&Q3`vo99x1Dz`XYh8LIP2qHV|BaFh}dSgq`eZqzYb+W0##U4T8-1+6w_kk z*-We1{*j_MusKd}&hzloqTs?P?Hd;-kG%Dv3cC=s@WjW55Ze0%2yYPLJqyp`KugFC zNX#a55lu_`<<^K&P3Y!{ceL~}`EJH^Q^4-fi^cIut3RHl(%Uzywz?c24u&!A4D9x5 z<;f+-u)W@B*?7pEmNze`KjrX=raLt7LdIE7bqFih-U$36g>RZ|J^a%8$fV|@rWP|y zj8CL%{q{AZR5;Zpamw#D`^O(mdMz;yrt4hE^yH;o~yUIG1|);`Ted9(ct`|+Pr zXQ(_I5T3wzXm%h!-E@h#b2rz+WIAh~ptA>qq`ZjkyIFcUr(@4+_F?{l;GPeF^%lzv zF>xyh{PPPpsiSj*7aI1z-MYCvS2P=`fp1FGpp}LNq{RxlN{%cnMaOzE6g9J>bRb;b z4?k@(5o)7pW<55_gfDjm&|Y>|?33Kp8x2h7J6d<)&#IcY5PK{mZRL^+qWiuE2h&D_0rTAk4#`y!hCV*cHT4s2M7<%e$Kqk3Bwzal-%L(Ql@a9(T7KWx1|o|ti^$){7`L|)5* zI%DV59N>L(8Q-(CQQHD4UaAK39E ztP9nHHQaSZO}U1rg7tB@ov$NSNXz#wlum1$0KsCI()`(?wG&XZ^ldAaJudoid0FhW zaHd}ngVmH*$>RuBodjuhq~#V;)#D%irH7w2(S)K)M0zMuy2dcFy5V~?8#jSt|EIRX zu+R9_KvDvSv0(iA!yaP54H=qT9_Ibze?hcgE;tMHZ0RRE8WqOXcA;%&6V{d>tHte>Xn~svH(Z782=n6II7|tN~GTAUJ z;ncW5uyHpQC-PrrqB#u-Um-vZ(b~nqQDL--PNt9q8~$BsaaL?<3Mqu%gzpwtPE|IL zOG1jXA@{u@AnG1HA+qWwK2prUu3DjC_7PN6Pomp^U6D*?2McS3ybEn6mQN^@|8CYKex4S`8z`2NQOV>e zpRW%HH`=Pc|2&QL+D(>49?|XIa|fX6{89ZR8elOqy4!>L?x%nZCmwWp7Mf0ww|DSy z-Om?NbAnuI_Z?u$ECn$Hj!KzgK`9M+jwr*NEw)mj5_CbYJI<@R%O>I%dp(H-S8BX;gMlE{dLB{0GutsO!esFcm z89+327@tUxB%&=5Cg;uT`bQY>x2Dg^C>x@TThpync}i^c7+^)XMSnK(kO*l7>^s;^ z(wXY71BK60oZ^zbnC;&3E29u)Z{=T)cgKV^#we+;9Z_<>Me65N<^?OX{_C&a#nzY*?maA^a~+EWjZPtu>NS_IMgWS={dD?Es74j z`Iq{10j?hF8Og7C>*YaZ^ss%yL1OOA7lol^)T(P>4?>*aFQsk2!DOeN`W`^u;G4Icj z6;})wQlz9uWqC@{#@`f@)V!Z<#>P?yGOkQ7LByu}vAWWVM^s_;xJ7d6tvdHm(_3rZ zR72hFw~Y5zMnUV?H{(oEpRm7%T&zFglvp4iczqL*>!}u(3>`}%o=UvQSe!i8tY4z| zLIOp3y8SzKbr73rvT1K;WZ}YjcT8G#=PmY_EO4bUe1BPAFJj0!Z1evSO6nZq&o~-2u_zl^lGr1{(N8CbtICTf=cYZo3m*$}7VC z&&`^R9G;rEB7XeW5Zix+C)33EIFpLJUNv1!^N@*?acM1{aGcD871B5P&Z!c-yBxpcB?aNzvF;W8E^boPt-_jYMdGZ^>UER)`Cm zXvApQ{DRiY6-m%u6NMOn$I;K^1f55QZ}KVWZwEReJA~6z+swi}{0q}oIi5Ooy5kir zl$ZJ8ZExy~%6Wk5@nhrldOif)rHWx0kE2;(V-YDzxm#2*ok=bGE#K?%H2AC2qf)rt zDa~_XAI+~=XRtIiuUW*ZNdTcaKXa6xosB%CT3F}nqxJsZxP4&Q`D^u*$HDI~nD58U z+p*gX(fHhxIe&xu5LX8KY=AarNeq$T(x>l5qyZJ`#R|z~m)$7zxkCOGJatFHH-`tW-*_NV@0J(_m4yD40e-nt?sn4aIyO zI20

      diff --git a/v0.20.3/internals/index.html b/v0.20.3/internals/index.html new file mode 100644 index 000000000..66c95f90c --- /dev/null +++ b/v0.20.3/internals/index.html @@ -0,0 +1,66 @@ + +Internals · MLJ

      Internals

      The machine interface, simplified

The following is a simplified description of the Machine interface. It predates the introduction of an optional data front-end for models (see Implementing a data front-end). See also the Glossary.

      The Machine type

mutable struct Machine{M<:Model}
      +
      +    model::M
      +    fitresult
      +    cache
      +    args::Tuple    # e.g., (X, y) for supervised models
      +    report
      +    previous_rows # remember the last rows used
      +
      +    function Machine{M}(model::M, args...) where M<:Model
      +        machine = new{M}(model)
      +        machine.args = args
      +        machine.report = Dict{Symbol,Any}()
      +        return machine
      +    end
      +
      +end

      Constructor

machine(model::M, Xtable, y) where M<:Model = Machine{M}(model, Xtable, y)
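For concreteness, here is a minimal usage sketch of this constructor. It assumes model is some Supervised model instance and that Xtable, y are a feature table and target vector already in scope; these names are illustrative only.

# `model`, `Xtable` and `y` are assumed to be defined already
mach = machine(model, Xtable, y)   # a Machine{typeof(model)} with args == (Xtable, y)
mach.report                        # an empty Dict{Symbol,Any} until fit! merges results into it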

      fit! and predict/transform

      function fit!(mach::Machine; rows=nothing, force=false, verbosity=1)
      +
      +    warning = clean!(mach.model)
      +    isempty(warning) || verbosity < 0 || @warn warning
      +
      +    if rows === nothing
      +        rows = (:)
      +    end
      +
      +    rows_have_changed  = (!isdefined(mach, :previous_rows) ||
      +	    rows != mach.previous_rows)
      +
      +    args = [MLJ.selectrows(arg, rows) for arg in mach.args]
      +
      +    if !isdefined(mach, :fitresult) || rows_have_changed || force
      +        mach.fitresult, mach.cache, report =
      +            fit(mach.model, verbosity, args...)
      +    else # call `update`:
      +        mach.fitresult, mach.cache, report =
      +            update(mach.model, verbosity, mach.fitresult, mach.cache, args...)
      +    end
      +
      +    if rows_have_changed
      +        mach.previous_rows = deepcopy(rows)
      +    end
      +
      +    if report !== nothing
      +        merge!(mach.report, report)
      +    end
      +
      +    return mach
      +
      +end
      +
      +function predict(machine::Machine{<:Supervised}, Xnew)
      +    if isdefined(machine, :fitresult)
+        return predict(machine.model, machine.fitresult, Xnew)
      +    else
      +        throw(error("$machine is not trained and so cannot predict."))
      +    end
      +end
      +
      +function transform(machine::Machine{<:Unsupervised}, Xnew)
      +    if isdefined(machine, :fitresult)
+        return transform(machine.model, machine.fitresult, Xnew)
      +    else
      +        throw(error("$machine is not trained and so cannot transform."))
      +    end
      +end
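Putting the pieces above together, here is a hedged end-to-end sketch of the simplified interface, again assuming model is a Supervised model and Xtable, y are training data (illustrative names only):

mach = machine(model, Xtable, y)
fit!(mach; rows=1:100)        # first call: dispatches to `fit`, storing fitresult, cache, report
fit!(mach; rows=1:100)        # same rows, force=false: dispatches to `update` instead of `fit`
fit!(mach; rows=1:150)        # rows have changed: `fit` is called again from scratch
yhat = predict(mach, Xtable)  # only succeeds once `fitresult` is defined by fit!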
      diff --git a/v0.20.3/learning_curves/index.html b/v0.20.3/learning_curves/index.html new file mode 100644 index 000000000..140e374b3 --- /dev/null +++ b/v0.20.3/learning_curves/index.html @@ -0,0 +1,65 @@ + +Learning Curves · MLJ

      Learning Curves

      A learning curve in MLJ is a plot of some performance estimate, as a function of some model hyperparameter. This can be useful when tuning a single model hyperparameter, or when deciding how many iterations are required for some iterative model. The learning_curve method does not actually generate a plot but generates the data needed to do so.

To generate learning curves, bind data to a model by instantiating a machine. It is fine to supply all available data, because performance estimates are computed using a resampling strategy, which defaults to Holdout(fraction_train=0.7).

      using MLJ
      +X, y = @load_boston;
      +
      +atom = (@load RidgeRegressor pkg=MLJLinearModels)()
      +ensemble = EnsembleModel(model=atom, n=1000)
      +mach = machine(ensemble, X, y)
      +
      +r_lambda = range(ensemble, :(model.lambda), lower=1e-1, upper=100, scale=:log10)
      +curve = MLJ.learning_curve(mach;
      +                           range=r_lambda,
      +                           resampling=CV(nfolds=3),
      +                           measure=l1)
      (parameter_name = "model.lambda",
      + parameter_scale = :log10,
      + parameter_values = [0.1, 0.12689610031679222, 0.16102620275609392, 0.20433597178569418, 0.25929437974046676, 0.3290344562312668, 0.41753189365604015, 0.5298316906283709, 0.6723357536499337, 0.8531678524172809  …  11.7210229753348, 14.87352107293511, 18.873918221350976, 23.95026619987486, 30.39195382313198, 38.56620421163472, 48.93900918477494, 62.10169418915616, 78.80462815669912, 100.0],
      + measurements = [4.632539244758051, 4.53955143279312, 4.40415226222209, 4.343004549542559, 4.252379374057707, 4.201566764413649, 4.151096973392104, 4.096263924843325, 4.0752008689055685, 4.068918655403295  …  4.80725029909915, 4.925881036403566, 5.030270192106893, 5.138806222411213, 5.270342009100555, 5.384189361403437, 5.507957417312457, 5.604219538269433, 5.705475501835234, 5.796368065858009],)
      using Plots
      +plot(curve.parameter_values,
      +     curve.measurements,
      +     xlab=curve.parameter_name,
      +     xscale=curve.parameter_scale,
      +     ylab = "CV estimate of RMS error")

If the range hyperparameter is the number of iterations in some iterative model, learning_curve will not restart the training from scratch for each new value, unless a non-holdout resampling strategy is specified (and provided the model implements an appropriate update method). To obtain multiple, distinct curves, pass the name of the model's random number generator hyper-parameter as rng_name, and specify the random number generators to be used with rngs=... (supplying an integer generates that many RNGs automatically):

atom.lambda = 7.3
      +r_n = range(ensemble, :n, lower=1, upper=50)
      +curves = MLJ.learning_curve(mach;
      +                            range=r_n,
      +                            measure=l1,
      +                            verbosity=0,
      +                            rng_name=:rng,
      +                            rngs=4)
      (parameter_name = "n",
      + parameter_scale = :linear,
      + parameter_values = [1, 3, 4, 6, 8, 9, 11, 13, 15, 16  …  35, 36, 38, 40, 42, 43, 45, 47, 48, 50],
      + measurements = [4.583413316633618 4.561159023696713 5.077586001728131 4.189046435910582; 4.604668816427499 4.691712972691241 5.0899469323902 4.377049612854087; … ; 4.609794532454427 4.717324447783916 5.09250090588918 4.424817777696763; 4.610136246856221 4.719120279472783 5.092685930714222 4.428007555919485],)
      plot(curves.parameter_values,
      +     curves.measurements,
      +     xlab=curves.parameter_name,
      +     ylab="Holdout estimate of RMS error")

      API reference

      MLJTuning.learning_curveFunction
      curve = learning_curve(mach; resolution=30,
      +                             resampling=Holdout(),
      +                             repeats=1,
      +                             measure=default_measure(machine.model),
      +                             rows=nothing,
      +                             weights=nothing,
      +                             operation=nothing,
      +                             range=nothing,
      +                             acceleration=default_resource(),
      +                             acceleration_grid=CPU1(),
      +                             rngs=nothing,
      +                             rng_name=nothing)

      Given a supervised machine mach, returns a named tuple of objects suitable for generating a plot of performance estimates, as a function of the single hyperparameter specified in range. The tuple curve has the following keys: :parameter_name, :parameter_scale, :parameter_values, :measurements.

      To generate multiple curves for a model with a random number generator (RNG) as a hyperparameter, specify the name, rng_name, of the (possibly nested) RNG field, and a vector rngs of RNG's, one for each curve. Alternatively, set rngs to the number of curves desired, in which case RNG's are automatically generated. The individual curve computations can be distributed across multiple processes using acceleration=CPUProcesses() or acceleration=CPUThreads(). See the second example below for a demonstration.

      X, y = @load_boston;
      +atom = @load RidgeRegressor pkg=MultivariateStats
      +ensemble = EnsembleModel(atom=atom, n=1000)
      +mach = machine(ensemble, X, y)
      +r_lambda = range(ensemble, :(atom.lambda), lower=10, upper=500, scale=:log10)
      +curve = learning_curve(mach; range=r_lambda, resampling=CV(), measure=mav)
      +using Plots
      +plot(curve.parameter_values,
      +     curve.measurements,
      +     xlab=curve.parameter_name,
      +     xscale=curve.parameter_scale,
      +     ylab = "CV estimate of RMS error")

      If using a Holdout() resampling strategy (with no shuffling) and if the specified hyperparameter is the number of iterations in some iterative model (and that model has an appropriately overloaded MLJModelInterface.update method) then training is not restarted from scratch for each increment of the parameter, ie the model is trained progressively.

      atom.lambda=200
      +r_n = range(ensemble, :n, lower=1, upper=250)
      +curves = learning_curve(mach; range=r_n, verbosity=0, rng_name=:rng, rngs=3)
      +plot!(curves.parameter_values,
      +     curves.measurements,
      +     xlab=curves.parameter_name,
      +     ylab="Holdout estimate of RMS error")
      +
      +
      learning_curve(model::Supervised, X, y; kwargs...)
      +learning_curve(model::Supervised, X, y, w; kwargs...)

      Plot a learning curve (or curves) directly, without first constructing a machine.

      Summary of key-word options

• resolution - number of points generated from range (number of model evaluations); default is 30

• acceleration - parallelization option for passing to evaluate!; an instance of CPU1, CPUProcesses or CPUThreads from ComputationalResources.jl; default is default_resource()

• acceleration_grid - parallelization option for distributing each performance evaluation

      • rngs - for specifying random number generator(s) to be passed to the model (see above)

      • rng_name - name of the model hyper-parameter representing a random number generator (see above); possibly nested

      Other key-word options are documented at TunedModel.
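As a minimal sketch (assuming the mach and r_lambda objects constructed in the example above), several of these keyword options can be combined in a single call:

curve = learning_curve(mach;
+                       range=r_lambda,
+                       resolution=50,                   # number of points generated from the range
+                       resampling=CV(nfolds=3),
+                       measure=l1,
+                       acceleration_grid=CPUThreads())  # parallelization option (see above)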

      source
      diff --git a/v0.20.3/learning_mlj/index.html b/v0.20.3/learning_mlj/index.html new file mode 100644 index 000000000..38dbbc928 --- /dev/null +++ b/v0.20.3/learning_mlj/index.html @@ -0,0 +1,2 @@ + +Learning MLJ · MLJ

      Learning MLJ

      MLJ Cheatsheet

      See also Getting help and reporting problems.

      The present document, although littered with examples, is primarily intended as a complete reference.

      Where to start?

      Completely new to Julia?

      Julia's learning resources page | Learn X in Y minutes | HelloJulia

      New to data science?

      Julia Data Science

      New to machine learning?

Introduction to Statistical Learning, with Julia versions of the R labs here

      Know some ML and just want MLJ basics?

      Getting Started | Common MLJ Workflows

      An ML practitioner transitioning from another platform?

      MLJ for Data Scientists in Two Hours | MLJTutorial

      Other resources

      diff --git a/v0.20.3/learning_networks/index.html b/v0.20.3/learning_networks/index.html new file mode 100644 index 000000000..a0ef20aee --- /dev/null +++ b/v0.20.3/learning_networks/index.html @@ -0,0 +1,417 @@ + +Learning Networks · MLJ

      Learning Networks

      Below is a practical guide to the MLJ implementation of learning networks, which have been described more abstractly in the article:

Anthony D. Blaom and Sebastian J. Vollmer (2020): Flexible model composition in machine learning and its implementation in MLJ. Preprint, arXiv:2012.15505.

      Learning networks, an advanced but powerful MLJ feature, are "blueprints" for combining models in flexible ways, beyond ordinary linear pipelines and simple model ensembles. They are simple transformations of your existing workflows which can be "exported" to define new, re-usable composite model types (models which typically have other models as hyperparameters).

Pipeline models (see Pipeline) and model stacks (see Stack) are both implemented internally as exported learning networks.

      Note

While learning networks can be used for complex machine learning workflows, their main purpose is for defining new stand-alone model types, which behave just like any other model type: instances can be evaluated, tuned, inserted into pipelines, etc. In serious applications, users are encouraged to export their learning networks, as explained under Exporting a learning network as a new model type below, after testing the network using a small training dataset.

      Learning networks by example

      Learning networks are best explained by way of example.

      Lazy computation

      The core idea of a learning network is delayed or lazy computation. Instead of

      X = 4
      +Y = 3
      +Z = 2*X
      +W = Y + Z
      +W
      11

      we can do

      using MLJ
      +
      +X = source(4)
      +Y = source(3)
      +Z = 2*X
      +W = Y + Z
      +W()
      11

      In the first computation X, Y, Z and W are all bound to ordinary data. In the second, they are bound to objects called nodes. The special nodes X and Y constitute "entry points" for data, and are called source nodes. As the terminology suggests, we can imagine these objects as part of a "network" (a directed acyclic graph) which can aid conceptualization (but is less useful in more complicated examples):

      The origin of a node

      The source nodes on which a given node depends are called the origins of the node:

      os = origins(W)
      2-element Vector{Source}:
      + Source @215 ⏎ `Count`
      + Source @822 ⏎ `Count`
      X in os
      true

      Re-using a network

      The advantage of lazy evaluation is that we can change data at a source node to repeat the calculation with new data. One way to do this (discouraged in practice) is to use rebind!:

      Z()
      8
      rebind!(X, 6) # demonstration only!
      +Z()
      12

However, if a node has a unique origin, then one can instead call the node on the new data one would like bound to that origin:

      origins(Z)
      1-element Vector{Source}:
      + Source @822 ⏎ `Count`
      Z(6)
      12
      Z(4)
      8

      This has the advantage that you don't need to locate the origin and rebind data directly, and the unique-origin restriction turns out to be sufficient for the applications to learning we have in mind.

      Overloading functions for use on nodes

Several built-in functions are overloaded in MLJBase to work on nodes, as * and + were in the example above. Others that work out-of-the-box include: MLJBase.matrix, MLJBase.table, vcat, hcat, mean, median, mode, first, last, as well as broadcasted versions of log, exp, mean, mode and median. A function like sqrt is not overloaded, so that Q = sqrt(Z) will throw an error. Instead, we do

      Q = node(z->sqrt(z), Z)
      +Z()
      12
      Q()
      3.4641016151377544
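By contrast, a function like vcat, listed among the out-of-the-box overloads above, needs no such wrapping. A minimal sketch, reusing the nodes Z and Q just defined:

S = vcat(Z, Q)   # a new node, since `vcat` is overloaded to work on nodes
+S()              # returns vcat(Z(), Q()), approximately [12.0, 3.46] for the values above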

You can learn more about the node function under More on defining new nodes below.

      A network that learns

To incorporate learning in a network of nodes, MLJ:

      • Allows binding of machines to nodes instead of data

• Generates "operation" nodes when calling an operation, such as predict or transform, on a machine and node input data. Such nodes point to both a machine (storing learned parameters) and the node from which to fetch the data on which to apply the operation; unlike the nodes seen so far, operation nodes depend on learned parameters to generate output.

      For an example of a learning network that actually learns, we first synthesize some training data X, y, and production data Xnew:

      using MLJ
      +X, y = make_blobs(cluster_std=10.0, rng=123)  # `X` is a table, `y` a vector
      +Xnew, _ = make_blobs(3) # `Xnew` is a table with the same number of columns

We choose a model to do some dimension reduction, and another to perform classification:

      pca = (@load PCA pkg=MultivariateStats verbosity=0)()
      +tree = (@load DecisionTreeClassifier pkg=DecisionTree verbosity=0)()

      To make our learning lazy, we wrap the training data as source nodes:

      Xs = source(X)
      +ys = source(y)

      And, finally, proceed as we would in an ordinary MLJ workflow, with the exception that there is no need to fit! our machines, as training will be carried out lazily later:

      mach1 = machine(pca, Xs)
      +x = transform(mach1, Xs) # defines a new node because `Xs` is a node
      +
      +mach2 = machine(tree, x, ys)
      +yhat = predict(mach2, x) # defines a new node because `x` is a node
      Node @749 → DecisionTreeClassifier(…)
      +  args:
      +    1:	Node @369 → PCA(…)
      +  formula:
      +    predict(
      +      machine(DecisionTreeClassifier(max_depth = -1, …), …), 
      +      transform(
      +        machine(PCA(maxoutdim = 0, …), …), 
      +        Source @356))

      Note that mach1 and mach2 are not themselves nodes. They point to the nodes they need to call to get training data and they are in turn pointed to by other nodes. In fact, an interesting implementation detail is that an "ordinary" machine is not actually bound directly to data, but bound to data wrapped in source nodes.

      machine(pca, Xnew).args[1] # `Xnew` is ordinary data
      Source @727 ⏎ `Table{AbstractVector{Continuous}}`

      Before calling a node, we need to fit! the node, to trigger training of all the machines on which it depends:

julia> fit!(yhat)   # can include same keyword options for `fit!(::Machine, ...)`
+[ Info: Training machine(PCA(maxoutdim = 0, …), …).
      +[ Info: Training machine(DecisionTreeClassifier(max_depth = -1, …), …).
      +Node @749 → DecisionTreeClassifier(…)
      +  args:
      +    1:	Node @369 → PCA(…)
      +  formula:
      +    predict(
      +      machine(DecisionTreeClassifier(max_depth = -1, …), …),
      +      transform(
      +        machine(PCA(maxoutdim = 0, …), …),
      +        Source @356))
julia> yhat()[1:2] # or `yhat(rows=2)`
+2-element UnivariateFiniteVector{Multiclass{3}, Int64, UInt32, Float64}:
+ UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>0.0, 3=>0.0)
+ UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>0.0, 3=>0.0)

This last represents the prediction on the training data, because that's what resides at our source nodes. However, yhat has the unique origin Xs (because "training edges" in the complete associated directed graph are excluded for this purpose). We can therefore call yhat on our production data to get the corresponding predictions:

      yhat(Xnew)
      3-element UnivariateFiniteVector{Multiclass{3}, Int64, UInt32, Float64}:
      + UnivariateFinite{Multiclass{3}}(1=>0.0, 2=>0.0, 3=>1.0)
      + UnivariateFinite{Multiclass{3}}(1=>0.0, 2=>0.0, 3=>1.0)
      + UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>0.0, 3=>0.0)

      Training is smart, in the sense that mutating a hyper-parameter of some component model does not force retraining of upstream machines:

julia> tree.max_depth = 1
+1
julia> fit!(yhat)
+[ Info: Not retraining machine(PCA(maxoutdim = 0, …), …). Use `force=true` to force.
+[ Info: Updating machine(DecisionTreeClassifier(max_depth = 1, …), …).
+Node @749 → DecisionTreeClassifier(…)
+  args:
+    1:  Node @369 → PCA(…)
+  formula:
+    predict(
+      machine(DecisionTreeClassifier(max_depth = 1, …), …),
+      transform(
+        machine(PCA(maxoutdim = 0, …), …),
+        Source @356))
julia> yhat(Xnew)
+3-element UnivariateFiniteVector{Multiclass{3}, Int64, UInt32, Float64}:
+ UnivariateFinite{Multiclass{3}}(1=>0.357, 2=>0.4, 3=>0.243)
+ UnivariateFinite{Multiclass{3}}(1=>0.357, 2=>0.4, 3=>0.243)
+ UnivariateFinite{Multiclass{3}}(1=>0.357, 2=>0.4, 3=>0.243)

      Multithreaded training

      A more complicated learning network may contain machines that can be trained in parallel. In that case, a call like the following may speed up training:

      tree.max_depth=2
      +fit!(yhat, acceleration=CPUThreads())
      [ Info: Not retraining machine(PCA(maxoutdim = 0, …), …). Use `force=true` to force.
      +[ Info: Updating machine(DecisionTreeClassifier(max_depth = 2, …), …).

      Currently, only CPU1() (default) and CPUThreads() are supported here.

      Exporting a learning network as a new model type

      Once a learning network has been tested, typically on some small dummy data set, it is ready to be exported as a new, stand-alone, re-usable model type (unattached to any data). We demonstrate the process by way of examples of increasing complexity:

      Example A - Mini-pipeline

First we export the simple learning network defined above. (This is for illustration purposes only; in practice, using the Pipeline syntax model1 |> model2 is more convenient.)

      Step 1 - Define a new model struct

      We need a type with two fields, one for the preprocessor (pca in the network above) and one for the classifier (tree in the network above).

      The DecisionTreeClassifier type of tree has supertype Probabilistic, because it makes probabilistic predictions, and we assume any other classifier we want to swap out will be the same.

      supertype(typeof(tree))
      Probabilistic

      In particular, our composite model will also need Probabilistic as supertype. In fact, we must give it the intermediate supertype ProbabilisticNetworkComposite <: Probabilistic, so that we additionally flag it as an exported learning network model type:

      mutable struct CompositeA <: ProbabilisticNetworkComposite
      +    preprocessor
      +    classifier
      +end

      The common alternatives are DeterministicNetworkComposite and UnsupervisedNetworkComposite. But all options can be viewed as follows:

      using MLJBase
      +NetworkComposite
      NetworkComposite (alias for Union{AnnotatorNetworkComposite, DeterministicNetworkComposite, DeterministicSupervisedDetectorNetworkComposite, DeterministicUnsupervisedDetectorNetworkComposite, IntervalNetworkComposite, JointProbabilisticNetworkComposite, ProbabilisticNetworkComposite, ProbabilisticSetNetworkComposite, ProbabilisticSupervisedDetectorNetworkComposite, ProbabilisticUnsupervisedDetectorNetworkComposite, StaticNetworkComposite, SupervisedAnnotatorNetworkComposite, SupervisedDetectorNetworkComposite, SupervisedNetworkComposite, UnsupervisedAnnotatorNetworkComposite, UnsupervisedDetectorNetworkComposite, UnsupervisedNetworkComposite})

      We next make our learning network model-generic by substituting each model instance with the corresponding symbol representing a property (field) of the new model struct:

      mach1 = machine(:preprocessor, Xs)   # <---- `pca` swapped out for `:preprocessor`
      +x = transform(mach1, Xs)
      +mach2 = machine(:classifier, x, ys)  # <---- `tree` swapped out for `:classifier`
      +yhat = predict(mach2, x)
      Node @567 → :classifier
      +  args:
      +    1:	Node @016 → :preprocessor
      +  formula:
      +    predict(
      +      machine(:classifier, …), 
      +      transform(
      +        machine(:preprocessor, …), 
      +        Source @356))

      Incidentally, this network can be used as before except we must provide an instance of CompositeA in our fit! calls, to indicate what actual models the symbols are being substituted with:

      composite_a = CompositeA(pca, ConstantClassifier())
      +fit!(yhat, composite=composite_a)
      +yhat(Xnew)
      3-element UnivariateFiniteVector{Multiclass{3}, Int64, UInt32, Float64}:
      + UnivariateFinite{Multiclass{3}}(1=>0.33, 2=>0.33, 3=>0.34)
      + UnivariateFinite{Multiclass{3}}(1=>0.33, 2=>0.33, 3=>0.34)
      + UnivariateFinite{Multiclass{3}}(1=>0.33, 2=>0.33, 3=>0.34)

      In this case :preprocessor is being substituted by pca, and :classifier by ConstantClassifier() for training.

      Step 2 - Wrap the learning network in prefit

      Literally copy and paste the learning network above into the definition of a method called prefit, as shown below (if you have implemented your own MLJ model, you will notice this has the same signature as MLJModelInterface.fit):

      import MLJBase
      +function MLJBase.prefit(composite::CompositeA, verbosity, X, y)
      +
      +        # the learning network from above:
      +        Xs = source(X)
      +        ys = source(y)
      +        mach1 = machine(:preprocessor, Xs)
      +        x = transform(mach1, Xs)
      +        mach2 = machine(:classifier, x, ys)
      +        yhat = predict(mach2, x)
      +
      +        verbosity > 0 && @info "I'm a noisy fellow!"
      +
      +        # return "learning network interface":
      +        return (; predict=yhat)
      +end

      That's it.

The prefit method always returns a learning network interface; see MLJBase.prefit for what this means in general. In this example, the interface dictates that calling predict(mach, Xnew) on a machine mach bound to some instance of CompositeA should internally call yhat(Xnew).

      Here's our new composite model type CompositeA in action, combining standardization with KNN classification:

      using MLJ
      +X, y = @load_iris
      +
      +knn = (@load KNNClassifier pkg=NearestNeighborModels verbosity=0)()
      +composite_a = CompositeA(Standardizer(), knn)
      CompositeA(
      +  preprocessor = Standardizer(
      +        features = Symbol[], 
      +        ignore = false, 
      +        ordered_factor = false, 
      +        count = false), 
      +  classifier = KNNClassifier(
      +        K = 5, 
      +        algorithm = :kdtree, 
      +        metric = Distances.Euclidean(0.0), 
      +        leafsize = 10, 
      +        reorder = true, 
      +        weights = NearestNeighborModels.Uniform()))
      mach = machine(composite_a, X, y) |> fit!
      +predict(mach, X)[1:2]
      2-element UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)
      report(mach).preprocessor
      (features_fit = [:sepal_length, :petal_width, :petal_length, :sepal_width],)
      fitted_params(mach).classifier
      (tree = NearestNeighbors.KDTree{StaticArraysCore.SVector{4, Float64}, Distances.Euclidean, Float64, StaticArraysCore.SVector{4, Float64}}
      +  Number of points: 150
      +  Dimensions: 4
      +  Metric: Distances.Euclidean(0.0)
      +  Reordered: true,)

      More on replacing models with symbols

Only the first argument model in some expression machine(model, ...) can be replaced with a symbol. These replacements function as hooks for exposing the reports and fitted parameters of component models in the report and fitted parameters of the composite model, but they are not strictly necessary. For example, instead of the line mach1 = machine(:preprocessor, Xs) in the prefit definition, we can do mach1 = machine(composite.preprocessor, Xs). However, report and fitted_params will not include items for the :preprocessor component model in that case.
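For concreteness, here is a hedged sketch of that alternative wiring, assuming the CompositeA type from Example A (defining it would replace the prefit method given earlier):

function MLJBase.prefit(composite::CompositeA, verbosity, X, y)
+    Xs = source(X)
+    ys = source(y)
+    mach1 = machine(composite.preprocessor, Xs)  # model instance instead of `:preprocessor`
+    x = transform(mach1, Xs)
+    mach2 = machine(composite.classifier, x, ys) # model instance instead of `:classifier`
+    yhat = predict(mach2, x)
+    # with this wiring, `report(mach)` and `fitted_params(mach)` will not expose
+    # per-component entries for the preprocessor and classifier:
+    return (; predict=yhat)
+end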

      If a component model is not explicitly bound to data in a machine (for example, because it is first wrapped in TunedModel) then there are ways to explicitly expose associated fitted parameters or report items. See Example F below.

      Example B - Multiple operations: transform and inverse transform

      Here's a second mini-pipeline example composing two transformers which both implement inverse transform. We show how to implement an inverse_transform for the composite model too.

      Step 1 - Define a new model struct

      using MLJ
      +import MLJBase
      +
      +mutable struct CompositeB <: DeterministicNetworkComposite
      +    transformer1
      +    transformer2
      +end

      Step 2 - Wrap the learning network in prefit

      function MLJBase.prefit(composite::CompositeB, verbosity, X)
      +    Xs = source(X)
      +
      +    mach1 = machine(:transformer1, Xs)
      +    X1 = transform(mach1, Xs)
      +    mach2 = machine(:transformer2, X1)
      +    X2 = transform(mach2, X1)
      +
      +    W1 = inverse_transform(mach2, Xs)
      +    W2 = inverse_transform(mach1, W1)
      +
      +    # the learning network interface:
      +    return (; transform=X2, inverse_transform=W2)
      +end

      Here's a demonstration:

      X = rand(100)
      +
      +composite_b = CompositeB(UnivariateBoxCoxTransformer(), Standardizer())
      +mach = machine(composite_b, X) |> fit!
      +W =  transform(mach, X)
      +@assert inverse_transform(mach, W) ≈ X

      Example C - Blending predictions and exposing internal network state in reports

      The code below defines a new composite model type CompositeC that predicts by taking the weighted average of two regressors, and additionally exposes, in the model's report, a measure of disagreement between the two models at time of training. In addition to the two regressors, the new model has two other fields:

      • mix, controlling the weighting

      • acceleration, for the mode of acceleration for training the model (e.g., CPUThreads()).

      Step 1 - Define a new model struct

      using MLJ
      +import MLJBase
      +
      +mutable struct CompositeC <: DeterministicNetworkComposite
      +    regressor1
      +    regressor2
      +    mix::Float64
      +    acceleration
      +end

      Step 2 - Wrap the learning network in prefit

      function MLJBase.prefit(composite::CompositeC, verbosity, X, y)
      +
      +    Xs = source(X)
      +    ys = source(y)
      +
      +    mach1 = machine(:regressor1, Xs, ys)
      +    mach2 = machine(:regressor2, Xs, ys)
      +
      +    yhat1 = predict(mach1, Xs)
      +    yhat2 = predict(mach2, Xs)
      +
      +    # node to return disagreement between the regressor predictions:
      +    disagreement = node((y1, y2) -> l2(y1, y2) |> mean, yhat1, yhat2)
      +
+    # get the weighted average of the predictions of the regressors:
      +    λ = composite.mix
      +    yhat = (1 - λ)*yhat1 + λ*yhat2
      +
      +    # the learning network interface:
      +    return (
      +        predict = yhat,
      +        report= (; training_disagreement=disagreement),
      +        acceleration = composite.acceleration,
      +    )
      +
      +end

      Here's a demonstration:

      X, y = make_regression() # a table and a vector
      +
      +knn = (@load KNNRegressor pkg=NearestNeighborModels verbosity=0)()
      +tree =  (@load DecisionTreeRegressor pkg=DecisionTree verbosity=0)()
      +composite_c = CompositeC(knn, tree, 0.2, CPUThreads())
      +mach = machine(composite_c, X, y) |> fit!
      +Xnew, _ = make_regression(3)
      +predict(mach, Xnew)
      3-element Vector{Float64}:
      + -0.7029764091986238
      + -0.7553231974592023
      + -0.6270125193923791
      report(mach)
      (regressor2 = (features = [:x1, :x2],),
      + training_disagreement = 0.0027608495393292233,
      + predict = (regressor2 = (features = [:x1, :x2],),),)

      Example D - Multiple nodes pointing to the same machine

      When incorporating learned target transformations (such as a standardization) in supervised learning, it is desirable to apply the inverse transformation to predictions, to return them to the original scale. This means re-using learned parameters from an earlier part of your workflow. This poses no problem here, as the next example demonstrates.

      The model type CompositeD defined below applies a preprocessing transformation to input data X (e.g., standardization), learns a transformation for the target y (e.g., an optimal Box-Cox transformation), predicts new target values using a regressor (e.g., Ridge regression), and then inverse-transforms those predictions to restore them to the original scale. (This represents a model we could alternatively build using the TransformedTargetModel wrapper and a Pipeline.)
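For comparison, here is a hedged sketch of that alternative construction, using Pipeline and TransformedTargetModel with the same kinds of components as in the demonstration further below:

using MLJ
+ridge = (@load RidgeRegressor pkg=MultivariateStats verbosity=0)()
+
+# standardize the features, then ridge-regress; the wrapper Box-Cox transforms the
+# target before training and inverse-transforms the predictions afterwards:
+pipe = Standardizer() |> ridge
+alternative_to_composite_d = TransformedTargetModel(
+    pipe,
+    transformer=UnivariateBoxCoxTransformer(),
+)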

      Step 1 - Define a new model struct

      using MLJ
      +import MLJBase
      +
      +mutable struct CompositeD <: DeterministicNetworkComposite
      +    preprocessor
      +    target_transformer
      +    regressor
      +    acceleration
      +end

      Step 2 - Wrap the learning network in prefit

      Notice that both of the nodes z and yhat in the wrapped learning network point to the same machine (learned parameters) mach2.

      function MLJBase.prefit(composite::CompositeD, verbosity, X, y)
      +
      +    Xs = source(X)
      +    ys = source(y)
      +
      +    mach1 = machine(:preprocessor, Xs)
      +    W = transform(mach1, Xs)
      +
      +    mach2 = machine(:target_transformer, ys)
      +    z = transform(mach2, ys)
      +
+    mach3 = machine(:regressor, W, z)
      +    zhat = predict(mach3, W)
      +
      +    yhat = inverse_transform(mach2, zhat)
      +
      +    # the learning network interface:
      +    return (
      +        predict = yhat,
      +        acceleration = composite.acceleration,
      +    )
      +
      +end

      The flow of information in the wrapped learning network is visualized below.

      Here's an application of our new composite to the Boston dataset:

      X, y = @load_boston
      +
      +stand = Standardizer()
      +box = UnivariateBoxCoxTransformer()
      +ridge = (@load RidgeRegressor pkg=MultivariateStats verbosity=0)(lambda=92)
      +composite_d = CompositeD(stand, box, ridge, CPU1())
      +evaluate(composite_d, X, y, resampling=CV(nfolds=5), measure=l2, verbosity=0)
      PerformanceEvaluation object with these fields:
      +  model, measure, operation, measurement, per_fold,
      +  per_observation, fitted_params_per_fold,
      +  report_per_fold, train_test_rows, resampling, repeats
      +Extract:
      +┌──────────┬───────────┬─────────────┬─────────┬────────────────────────────────
      +│ measure  │ operation │ measurement │ 1.96*SE │ per_fold                      ⋯
      +├──────────┼───────────┼─────────────┼─────────┼────────────────────────────────
      +│ LPLoss(  │ predict   │ 27.9        │ 20.0    │ [9.26, 23.1, 33.6, 60.4, 13.4 ⋯
      +│   p = 2) │           │             │         │                               ⋯
      +└──────────┴───────────┴─────────────┴─────────┴────────────────────────────────
      +                                                                1 column omitted
      +

      Example E - Coupling component model hyper-parameters

      The composite model in this example combines a clustering model used to reduce the dimension of the feature space (KMeans or KMedoids from Clustering.jl) with ridge regression, but has the following "coupling" of the hyperparameters: The amount of ridge regularization depends on the number of specified clusters k, with less regularization for a greater number of clusters. It includes a user-specified coupling coefficient c, and exposes the solver hyper-parameter of the ridge regressor. (Neither the clusterer nor ridge regressor are themselves hyperparameters of the composite.)

      Step 1 - Define a new model struct

      using MLJ
      +import MLJBase
      +
      +mutable struct CompositeE <: DeterministicNetworkComposite
      +        clusterer     # `:kmeans` or `:kmedoids`
      +        k::Int        # number of clusters
      +        solver        # a ridge regression parameter we want to expose
      +        c::Float64    # a "coupling" coefficient
      +end

      Step 2 - Wrap the learning network in prefit

      RidgeRegressor = @load RidgeRegressor pkg=MLJLinearModels verbosity=0
      +KMeans   = @load KMeans pkg=Clustering verbosity=0
      +KMedoids = @load KMedoids pkg=Clustering verbosity=0
      +
      +function MLJBase.prefit(composite::CompositeE, verbosity, X, y)
      +
      +        Xs = source(X)
      +        ys = source(y)
      +
      +        k = composite.k
      +        solver = composite.solver
      +        c = composite.c
      +
      +        clusterer = composite.clusterer == :kmeans ? KMeans(; k) : KMedoids(; k)
      +        mach1 = machine(clusterer, Xs)
      +        Xsmall = transform(mach1, Xs)
      +
      +        # the coupling - ridge regularization depends on the number of
      +        # clusters `k` and the coupling coefficient `c`:
      +        lambda = exp(-c/k)
      +
      +        ridge = RidgeRegressor(; lambda, solver)
      +        mach2 = machine(ridge, Xsmall, ys)
      +        yhat = predict(mach2, Xsmall)
      +
      +        return (predict=yhat,)
      +end

      Here's an application to the Boston dataset in which we optimize the coupling coefficient (see Tuning Models for more on hyper-parameter optimization):

      X, y = @load_boston # a table and a vector
      +
      +composite_e = CompositeE(:kmeans, 3, nothing, 0.5)
      +r = range(composite_e, :c, lower = -2, upper=2, scale=x->10^x)
      +tuned_composite_e = TunedModel(
      +    composite_e,
      +    range=r,
      +    tuning=RandomSearch(rng=123),
      +    measure=l2,
      +    resampling=CV(nfolds=6),
      +    n=100,
      +)
      +mach = machine(tuned_composite_e, X, y) |> fit!
      +report(mach).best_model
      CompositeE(
      +  clusterer = :kmeans, 
      +  k = 3, 
      +  solver = nothing, 
      +  c = 0.14720178188876742)

      More on defining new nodes

      Overloading ordinary functions for nodes has already been discussed above. Here's another example:

      divide(x, y) = x/y
      +
      +X = source(2)
      +Y = source(3)
      +
      +Z = node(divide, X, Y)

      This means Z() returns divide(X(), Y()), which is divide(2, 3) in this case:

      Z()
      0.6666666666666666

      We cannot call Z with arguments (e.g., Z(2)) because it does not have a unique origin.

      In all the node examples so far, the first argument of node is a function, and all other arguments are nodes - one node for each argument of the function. A node constructed in this way is called a static node. A dynamic node, which directly depends on the outcome of a training event, is constructed by giving a machine as the second argument, to be passed as the first argument of the function in a node call. For example, we can do

      Xs = source(rand(4))
      +mach = machine(Standardizer(), Xs)
      +N = node(transform, mach, Xs) |> fit!
      [ Info: Training machine(Standardizer(features = Symbol[], …), …).

      Then N has the following calling properties:

      • N() returns transform(mach, Xs())
• N(Xnew) returns transform(mach, Xs(Xnew)); here Xs(Xnew) is just Xnew, because Xs is a source node.
      N()
      4-element Vector{Float64}:
      +  0.3097277718673464
      + -1.4244083620630208
      +  0.20105872362694022
      +  0.9136218665687336
      N(rand(2))
      2-element Vector{Float64}:
      +  0.6707127594719003
      + -0.8770485483419167

      In fact, this is precisely how the transform method is internally overloaded to work, when called with a node argument (to return a node instead of data). That is, internally there exists code that amounts to the definition

      transform(mach, X::AbstractNode) = node(transform, mach, X)

      Here AbstractNode is the common super-type of Node and Source.

It is sometimes useful to create dynamic nodes with no node arguments, as in

      Xs = source(rand(10))
      +mach = machine(Standardizer(), Xs)
      +N = node(fitted_params, mach) |> fit!
      +N()
      (mean = 0.46023189664355446,
      + std = 0.3287060402337959,)

Static nodes can also have zero node arguments. These may be viewed as "constant" nodes:

      N = Node(()-> 42)
      +N()
      42

      Example F below demonstrates the use of static and dynamic nodes. For more details, see the node docstring.

      There is also an experimental macro @node. If Z is an AbstractNode (Z = source(16), say) then instead of

      Q = node(z->sqrt(z), Z)

      one can do

      Q = @node sqrt(Z)

      (so that Q() == 4). Here's a more complicated application of @node to row-shuffle a table:

      using Random
      +X = (x1 = [1, 2, 3, 4, 5],
      +         x2 = [:one, :two, :three, :four, :five])
      +rows(X) = 1:nrows(X)
      +
      +Xs = source(X)
      +rs  = @node rows(Xs)
      +W = @node selectrows(Xs, @node shuffle(rs))
      +
      +julia> W()
      +(x1 = [5, 1, 3, 2, 4],
      + x2 = Symbol[:five, :one, :three, :two, :four],)
      +

      Important. An argument not in global scope is assumed by @node to be a node or source.

      Example F - Wrapping a model in a data-dependent tuning strategy

      When the regularization parameter of a Lasso model is optimized, one commonly searches over a parameter range depending on properties of the training data. Indeed, Lasso (and, more generally, elastic net) implementations commonly provide a method to carry out this data-dependent optimization automatically, using cross-validation. The following example shows how to transform the LassoRegressor model type from MLJLinearModels.jl into a self-tuning model type LassoCVRegressor using the commonly implemented data-dependent tuning strategy. A new dimensionless hyperparameter epsilon controls the lower bound on the parameter range.

      Step 1 - Define a new model struct

      using MLJ
      +import MLJBase
      +
      +mutable struct LassoCVRegressor <: DeterministicNetworkComposite
      +    lasso              # the atomic lasso model (`lasso.lambda` is ignored)
      +    epsilon::Float64   # controls lower bound of `lasso.lambda` in tuning
      +    resampling         # resampling strategy for optimization of `lambda`
      +end
      +
      +# keyword constructor for convenience:
      +LassoRegressor = @load LassoRegressor pkg=MLJLinearModels verbosity=0
      +LassoCVRegressor(;
      +    lasso=LassoRegressor(),
      +    epsilon=0.001,
      +    resampling=CV(nfolds=6),
      +) = LassoCVRegressor(
      +    lasso,
      +    epsilon,
      +    resampling,
      +)

      Step 2 - Wrap the learning network in prefit

In this case, no model -> :symbol replacement makes sense, because the model is getting wrapped by TunedModel before being bound to nodes in a machine. However, we can expose the learned lasso coefs and intercept using fitted parameter nodes, and expose the optimal lambda, and the range searched, using report nodes (as previously demonstrated in Example C).

      function MLJBase.prefit(composite::LassoCVRegressor, verbosity, X, y)
      +
      +    λ_max = maximum(abs.(MLJ.matrix(X)'y))
      +
      +    Xs = source(X)
      +    ys = source(y)
      +
      +    r = range(
      +        composite.lasso,
      +        :lambda,
      +        lower=composite.epsilon*λ_max,
      +        upper=λ_max,
      +        scale=:log10,
      +    )
      +
      +    lambda_range = node(()->r)  # a "constant" report node
      +
      +    tuned_lasso = TunedModel(
      +        composite.lasso,
      +        tuning=Grid(shuffle=false),
      +        range = r,
      +        measure = l2,
      +        resampling=composite.resampling,
      +    )
      +    mach = machine(tuned_lasso, Xs, ys)
      +
      +    R = node(report, mach)                                 # `R()` returns `report(mach)`
      +    lambda = node(r -> r.best_model.lambda, R)             # a report node
      +
      +    F = node(fitted_params, mach)             # `F()` returns `fitted_params(mach)`
      +    coefs = node(f->f.best_fitted_params.coefs, F)         # a fitted params node
      +    intercept = node(f->f.best_fitted_params.intercept, F) # a fitted params node
      +
      +    yhat = predict(mach, Xs)
      +
      +    return (
      +        predict=yhat,
      +        fitted_params=(; coefs, intercept),
      +        report=(; lambda, lambda_range),
      +   )
      +
      +end

      Here's a demonstration:

      X, _ = make_regression(1000, 3, rng=123)
      +y = X.x2 - X.x2 + 0.005*X.x3 + 0.05*rand(1000)
      +lasso_cv = LassoCVRegressor(epsilon=1e-5)
      +mach = machine(lasso_cv, X, y) |> fit!
      +report(mach)
      (lambda = 0.00042546886155141466,
      + lambda_range = NumericRange(3.294e-5 ≤ lambda ≤ 3.294; origin=1.647, unit=1.647; on log10 scale),)
      fitted_params(mach)
      (coefs = [:x1 => 0.0, :x2 => -0.0, :x3 => 0.004147704827722941],
      + intercept = 0.02486029602840344,)

      The learning network API

      Two new julia types are part of learning networks: Source and Node, which share a common abstract supertype AbstractNode.

Formally, a learning network defines two labeled directed acyclic graphs (DAGs) whose nodes are Node or Source objects, and whose labels are Machine objects. We obtain the first DAG from directed edges of the form N1 -> N2 whenever N1 is an argument of N2 (see below). Only this DAG is relevant when calling a node, as discussed in the examples above and below. To form the second DAG (relevant when calling fit! on a node) one adds edges N1 -> N2 for which N1 is a training argument of the machine which labels N2. We call this second, larger DAG the completed learning network (but note only edges of the smaller network are explicitly drawn in diagrams, for simplicity).
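As a small sketch of the distinction (the variable names here are illustrative only): for a node built from a supervised machine, origins excludes the target source, which is connected only by a training edge, while sources includes it:

using MLJ
+import MLJBase
+
+Xs = source((x1=rand(5),))
+ys = source(rand(5))
+mach = machine(ConstantRegressor(), Xs, ys)
+yhat = predict(mach, Xs)
+
+origins(yhat)          # one source only: the one wrapping the input table
+MLJBase.sources(yhat)  # both sources, because training edges are now included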

      Source nodes

      Only source nodes can reference concrete data. A Source object has a single field, data.

MLJBase.source - Method
      Xs = source(X=nothing)

Define a learning network Source object, wrapping some input data X, which can be nothing for purposes of exporting the network as a stand-alone model. For training and testing the unexported network, appropriate vectors, tables, or other data containers are expected.

      The calling behaviour of a Source object is this:

      Xs() = X
      +Xs(rows=r) = selectrows(X, r)  # eg, X[r,:] for a DataFrame
      +Xs(Xnew) = Xnew

      See also: MLJBase.prefit, sources, origins, node.

      source
MLJBase.rebind! - Function
      rebind!(s, X)

      Attach new data X to an existing source node s. Not a public method.

      source
MLJBase.sources - Function
      sources(N::AbstractNode)

      A vector of all sources referenced by calls N() and fit!(N). These are the sources of the ancestor graph of N when including training edges.

      Not to be confused with origins(N), in which training edges are excluded.

      See also: origins, source.

      source
MLJBase.origins - Function
      origins(N)

Return a list of all origins of a node N accessed by a call N(). These are the source nodes of the ancestor graph of N when edges corresponding to training arguments are excluded. A Node object cannot be called on new data unless it has a unique origin.

      Not to be confused with sources(N) which refers to the same graph but without the training edge deletions.

      See also: node, source.

      source

      Nodes

MLJBase.Node - Type
      Node{T<:Union{Machine,Nothing}}

      Type for nodes in a learning network that are not Source nodes.

      The key components of a Node are:

      • An operation, which will either be static (a fixed function) or dynamic (such as predict or transform).

      • A Machine object, on which to dispatch the operation (nothing if the operation is static). The training arguments of the machine are generally other nodes, including Source nodes.

      • Upstream connections to other nodes, called its arguments, possibly including Source nodes, one for each data argument of the operation (typically there's just one).

      When a node N is called, as in N(), it applies the operation on the machine (if there is one) together with the outcome of calls to its node arguments, to compute the return value. For details on a node's calling behavior, see node.

      See also node, Source, origins, sources, fit!.

      source
MLJBase.node - Function
      J = node(f, mach::Machine, args...)

      Defines a dynamic Node object J wrapping a dynamic operation f (predict, predict_mean, transform, etc), a nodal machine mach and arguments args. Its calling behaviour, which depends on the outcome of training mach (and, implicitly, on training outcomes affecting its arguments) is this:

      J() = f(mach, args[1](), args[2](), ..., args[n]())
      +J(rows=r) = f(mach, args[1](rows=r), args[2](rows=r), ..., args[n](rows=r))
      +J(X) = f(mach, args[1](X), args[2](X), ..., args[n](X))

      Generally n=1 or n=2 in this latter case.

predict(mach, X::AbstractNode, y::AbstractNode)
      +predict_mean(mach, X::AbstractNode, y::AbstractNode)
      +predict_median(mach, X::AbstractNode, y::AbstractNode)
      +predict_mode(mach, X::AbstractNode, y::AbstractNode)
      +transform(mach, X::AbstractNode)
      +inverse_transform(mach, X::AbstractNode)

      Shortcuts for J = node(predict, mach, X, y), etc.

      Calling a node is a recursive operation which terminates in the call to a source node (or nodes). Calling nodes on new data X fails unless the number of such nodes is one.

      See also: Node, @node, source, origins.

      source
MLJBase.@node - Macro
      @node f(...)

      Construct a new node that applies the function f to some combination of nodes, sources and other arguments.

      Important. An argument not in global scope is assumed to be a node or source.

      Examples

      X = source(π)
      +W = @node sin(X)
      +julia> W()
      +0
      +
      +X = source(1:10)
      +Y = @node selectrows(X, 3:4)
      +julia> Y()
      +3:4
      +
      +julia> Y(["one", "two", "three", "four"])
+2-element Array{String,1}:
      + "three"
      + "four"
      +
      +X1 = source(4)
      +X2 = source(5)
      +add(a, b, c) = a + b + c
      +N = @node add(X1, 1, X2)
      +julia> N()
      +10
      +

      See also node

      source
MLJBase.prefit - Function
      MLJBase.prefit(model, verbosity, data...)

      Returns a learning network interface (see below) for a learning network with source nodes that wrap data.

      A user overloads MLJBase.prefit when exporting a learning network as a new stand-alone model type, of which model above will be an instance. See the MLJ reference manual for details.

      A learning network interface is a named tuple declaring certain interface points in a learning network, to be used when "exporting" the network as a new stand-alone model type. Examples are

       (predict=yhat,)
      + (transform=Xsmall, acceleration=CPUThreads())
      + (predict=yhat, transform=W, report=(loss=loss_node,))

      Here yhat, Xsmall, W and loss_node are nodes in the network.

The keys of the learning network interface are always one of the following:

      • The name of an operation, such as :predict, :predict_mode, :transform, :inverse_transform. See "Operation keys" below.

      • :report, for exposing results of calling a node with no arguments in the composite model report. See "Including report nodes" below.

      • :fitted_params, for exposing results of calling a node with no arguments as fitted parameters of the composite model. See "Including fitted parameter nodes" below.

      • :acceleration, for articulating acceleration mode for training the network, e.g., CPUThreads(). Corresponding value must be an AbstractResource. If not included, CPU1() is used.
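For orientation, here is a hedged sketch of an interface combining all four kinds of key; the node names are hypothetical:

(predict = yhat,                        # an operation key
+ report = (; lambda=lambda_node),       # exposed in the composite model report
+ fitted_params = (; coefs=coefs_node),  # exposed as fitted parameters
+ acceleration = CPUThreads())           # resource for training the network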

      Operation keys

      If the key is an operation, then the value must be a node n in the network with a unique origin (length(origins(n)) === 1). The intention of a declaration such as predict=yhat is that the exported model type implements predict, which, when applied to new data Xnew, should return yhat(Xnew).

      Including report nodes

      If the key is :report, then the corresponding value must be a named tuple

       (k1=n1, k2=n2, ...)

whose values are all nodes. For each k=n pair, the key k will appear as a key in the composite model report, with a corresponding value of deepcopy(n()), called immediately after training or updating the network. For examples, refer to the "Learning Networks" section of the MLJ manual.

      Including fitted parameter nodes

      If the key is :fitted_params, then the behaviour is as for report nodes but results are exposed as fitted parameters of the composite model instead of the report.

      source

      See more on fitting nodes at fit! and fit_only!.

      diff --git a/v0.20.3/linear_pipelines/index.html b/v0.20.3/linear_pipelines/index.html new file mode 100644 index 000000000..48df28801 --- /dev/null +++ b/v0.20.3/linear_pipelines/index.html @@ -0,0 +1,38 @@ + +Linear Pipelines · MLJ

      Linear Pipelines

      In MLJ a pipeline is a composite model in which models are chained together in a linear (non-branching) chain. For other arrangements, including custom architectures via learning networks, see Composing Models.

      For purposes of illustration, consider a supervised learning problem with the following toy data:

      using MLJ
      +X = (age    = [23, 45, 34, 25, 67],
      +     gender = categorical(['m', 'm', 'f', 'm', 'f']));
      +y = [67.0, 81.5, 55.6, 90.0, 61.1]

      We would like to train using a K-nearest neighbor model, but the model type KNNRegressor assumes the features are all Continuous. This can be fixed by first:

      • coercing the :age feature to have Continuous type by replacing X with coerce(X, :age=>Continuous)
      • standardizing continuous features and one-hot encoding the Multiclass features using the ContinuousEncoder model

However, we can avoid separately applying these preprocessing steps (two of which require fit! steps) by combining them with the supervised KNNRegressor model in a new pipeline model, using Julia's |> syntax:

      KNNRegressor = @load KNNRegressor pkg=NearestNeighborModels
      +pipe = (X -> coerce(X, :age=>Continuous)) |> ContinuousEncoder() |> KNNRegressor(K=2)
      DeterministicPipeline(
      +  f = Main.var"#1#2"(), 
      +  continuous_encoder = ContinuousEncoder(
      +        drop_last = false, 
      +        one_hot_ordered_factors = false), 
      +  knn_regressor = KNNRegressor(
      +        K = 2, 
      +        algorithm = :kdtree, 
      +        metric = Distances.Euclidean(0.0), 
      +        leafsize = 10, 
      +        reorder = true, 
      +        weights = NearestNeighborModels.Uniform()), 
      +  cache = true)

      We see above that pipe is a model whose hyperparameters are themselves other models or a function. (The names of these hyper-parameters are automatically generated. To specify your own names, use the explicit Pipeline constructor instead.)
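As a hedged sketch, the same pipeline with explicitly named components might look like the following (the component names are arbitrary):

pipe_named = Pipeline(
+    age_coercer = X -> coerce(X, :age=>Continuous),
+    encoder = ContinuousEncoder(),
+    regressor = KNNRegressor(K=2),
+)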

      The |> syntax can also be used to extend an existing pipeline or concatenate two existing pipelines. So, we could instead have defined:

      pipe_transformer = (X -> coerce(X, :age=>Continuous)) |> ContinuousEncoder()
      +pipe = pipe_transformer |> KNNRegressor(K=2)

      A pipeline is just a model like any other. For example, we can evaluate its performance on the data above:

      evaluate(pipe, X, y, resampling=CV(nfolds=3), measure=mae)
      PerformanceEvaluation object with these fields:
      +  model, measure, operation, measurement, per_fold,
      +  per_observation, fitted_params_per_fold,
      +  report_per_fold, train_test_rows, resampling, repeats
      +Extract:
      +┌──────────┬───────────┬─────────────┬─────────┬────────────────────┐
      +│ measure  │ operation │ measurement │ 1.96*SE │ per_fold           │
      +├──────────┼───────────┼─────────────┼─────────┼────────────────────┤
      +│ LPLoss(  │ predict   │ 11.3        │ 7.88    │ [7.25, 17.2, 7.45] │
      +│   p = 1) │           │             │         │                    │
      +└──────────┴───────────┴─────────────┴─────────┴────────────────────┘
      +

      To include target transformations in a pipeline, wrap the supervised component using TransformedTargetModel.
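For example, here is a hedged sketch wrapping the KNN regressor so that the target is standardized during training and predictions are returned on the original scale (assuming the components loaded above):

knn_with_target_standardization = TransformedTargetModel(
+    KNNRegressor(K=2),
+    transformer=Standardizer(),
+)
+pipe_tt = (X -> coerce(X, :age=>Continuous)) |> ContinuousEncoder() |> knn_with_target_standardization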

MLJBase.Pipeline - Function
      Pipeline(component1, component2, ... , componentk; options...)
      +Pipeline(name1=component1, name2=component2, ..., namek=componentk; options...)
      +component1 |> component2 |> ... |> componentk

      Create an instance of a composite model type which sequentially composes the specified components in order. This means component1 receives inputs, whose output is passed to component2, and so forth. A "component" is either a Model instance, a model type (converted immediately to its default instance) or any callable object. Here the "output" of a model is what predict returns if it is Supervised, or what transform returns if it is Unsupervised.

      Names for the component fields are automatically generated unless explicitly specified, as in

      Pipeline(encoder=ContinuousEncoder(drop_last=false),
      +         stand=Standardizer())

      The Pipeline constructor accepts keyword options discussed further below.

      Ordinary functions (and other callables) may be inserted in the pipeline as shown in the following example:

      Pipeline(X->coerce(X, :age=>Continuous), OneHotEncoder, ConstantClassifier)

      Syntactic sugar

      The |> operator is overloaded to construct pipelines out of models, callables, and existing pipelines:

      LinearRegressor = @load LinearRegressor pkg=MLJLinearModels add=true
      +PCA = @load PCA pkg=MultivariateStats add=true
      +
      +pipe1 = MLJBase.table |> ContinuousEncoder |> Standardizer
      +pipe2 = PCA |> LinearRegressor
      +pipe1 |> pipe2

      At most one of the components may be a supervised model, but this model can appear in any position. A pipeline with a Supervised component is itself Supervised and implements the predict operation. It is otherwise Unsupervised (possibly Static) and implements transform.

      Special operations

      If all the components are invertible unsupervised models (ie, implement inverse_transform) then inverse_transform is implemented for the pipeline. If there are no supervised models, then predict is nevertheless implemented, assuming the last component is a model that implements it (some clustering models). Similarly, calling transform on a supervised pipeline calls transform on the supervised component.
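A minimal sketch of the first special case (two invertible unsupervised components, so that the pipeline itself supports inverse_transform):

x = rand(100)
+pipe_invertible = UnivariateBoxCoxTransformer() |> Standardizer()
+mach = machine(pipe_invertible, x) |> fit!
+W = transform(mach, x)
+@assert inverse_transform(mach, W) ≈ x   # the component inverse transforms are applied in reverse order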

      Optional key-word arguments

      • prediction_type - prediction type of the pipeline; possible values: :deterministic, :probabilistic, :interval (default=:deterministic if not inferable)

      • operation - operation applied to the supervised component model, when present; possible values: predict, predict_mean, predict_median, predict_mode (default=predict)

      • cache - whether the internal machines created for component models should cache model-specific representations of data (see machine) (default=true)

      Warning

      Set cache=false to guarantee data anonymization.
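As a hedged sketch combining the options above (the component models are chosen arbitrarily):

pipe_opts = Pipeline(
+    Standardizer(),
+    ConstantClassifier();
+    operation=predict_mode,   # the pipeline's `predict` returns point (modal) predictions
+    cache=false,              # don't cache model-specific representations of data
+)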

      To build more complicated non-branching pipelines, refer to the MLJ manual sections on composing models.

      source
      diff --git a/v0.20.3/list_of_supported_models/index.html b/v0.20.3/list_of_supported_models/index.html new file mode 100644 index 000000000..c1bfc63ce --- /dev/null +++ b/v0.20.3/list_of_supported_models/index.html @@ -0,0 +1,2 @@ + +List of Supported Models · MLJ

      List of Supported Models

      For a list of models organized around function ("classification", "regression", etc.), see the Model Browser.

      MLJ provides access to a wide variety of machine learning models. We are always looking for help adding new models or testing existing ones. Currently available models are listed below; for the most up-to-date list, run using MLJ; models().

Indications of "maturity" in the table below are approximate, subjective, and possibly out-of-date. A decision to use or not use a model in a critical application should be based on a user's independent assessment.

      • experimental: indicates the package is fairly new and/or is under active development; you can help by testing these packages and making them more robust,
• low: indicates a package that has reached a roughly stable form in terms of interface and which is unlikely to contain serious bugs. It may be missing some functionality found in similar packages and has not yet benefited from a high level of use,
• medium: indicates the package is fairly mature but may benefit from optimizations and/or extra features; you can help by suggesting either,
• high: indicates the package is very mature and functionality is expected to have been fairly well optimized and tested.
Package | Interface Pkg | Models | Maturity | Note
BetaML.jl | - | DecisionTreeClassifier, RandomForestClassifier, NeuralNetworkClassifier, PerceptronClassifier, KernelPerceptronClassifier, PegasosClassifier, DecisionTreeRegressor, RandomForestRegressor, NeuralNetworkRegressor, MultitargetNeuralNetworkRegressor, GaussianMixtureRegressor, MultitargetGaussianMixtureRegressor, KMeansClusterer, KMedoidsClusterer, GaussianMixtureClusterer, SimpleImputer, GaussianMixtureImputer, RandomForestImputer, GeneralImputer, AutoEncoder | medium
CatBoost.jl | - | CatBoostRegressor, CatBoostClassifier | high
Clustering.jl | MLJClusteringInterface.jl | KMeans, KMedoids, DBSCAN, HierarchicalClustering | high²
DecisionTree.jl | MLJDecisionTreeInterface.jl | DecisionTreeClassifier, DecisionTreeRegressor, AdaBoostStumpClassifier, RandomForestClassifier, RandomForestRegressor | high
EvoTrees.jl | - | EvoTreeRegressor, EvoTreeClassifier, EvoTreeCount, EvoTreeGaussian, EvoTreeMLE | medium | tree-based gradient boosting models
EvoLinear.jl | - | EvoLinearRegressor | medium | linear boosting models
GLM.jl | MLJGLMInterface.jl | LinearRegressor, LinearBinaryClassifier, LinearCountRegressor | medium²
Imbalance.jl | - | RandomOversampler, RandomWalkOversampler, ROSE, SMOTE, BorderlineSMOTE1, SMOTEN, SMOTENC, RandomUndersampler, ClusterUndersampler, ENNUndersampler, TomekUndersampler | low
LIBSVM.jl | MLJLIBSVMInterface.jl | LinearSVC, SVC, NuSVC, NuSVR, EpsilonSVR, OneClassSVM | high | also via ScikitLearn.jl
LightGBM.jl | - | LGBMClassifier, LGBMRegressor | high
Flux.jl | MLJFlux.jl | NeuralNetworkRegressor, NeuralNetworkClassifier, MultitargetNeuralNetworkRegressor, ImageClassifier | low
MLJBalancing.jl | - | BalancedBaggingClassifier | low
MLJLinearModels.jl | - | LinearRegressor, RidgeRegressor, LassoRegressor, ElasticNetRegressor, QuantileRegressor, HuberRegressor, RobustRegressor, LADRegressor, LogisticClassifier, MultinomialClassifier | medium
MLJModels.jl (built-in) | - | ConstantClassifier, ConstantRegressor, ContinuousEncoder, DeterministicConstantClassifier, DeterministicConstantRegressor, FeatureSelector, FillImputer, InteractionTransformer, OneHotEncoder, Standardizer, UnivariateBoxCoxTransformer, UnivariateDiscretizer, UnivariateFillImputer, UnivariateTimeTypeToContinuous, Standardizer, BinaryThreshholdPredictor | medium
MLJText.jl | - | TfidfTransformer, BM25Transformer, CountTransformer | low
MultivariateStats.jl | MLJMultivariateStatsInterface.jl | LinearRegressor, MultitargetLinearRegressor, RidgeRegressor, MultitargetRidgeRegressor, PCA, KernelPCA, ICA, LDA, BayesianLDA, SubspaceLDA, BayesianSubspaceLDA, FactorAnalysis, PPCA | high
NaiveBayes.jl | MLJNaiveBayesInterface.jl | GaussianNBClassifier, MultinomialNBClassifier, HybridNBClassifier | low
NearestNeighborModels.jl | - | KNNClassifier, KNNRegressor, MultitargetKNNClassifier, MultitargetKNNRegressor | high
OneRule.jl | - | OneRuleClassifier | experimental
OutlierDetectionNeighbors.jl | - | ABODDetector, COFDetector, DNNDetector, KNNDetector, LOFDetector | medium
OutlierDetectionNetworks.jl | - | AEDetector, DSADDetector, ESADDetector | medium
OutlierDetectionPython.jl | - | ABODDetector, CBLOFDetector, CDDetector, COFDetector, COPODDetector, ECODDetector, GMMDetector, HBOSDetector, IForestDetector, INNEDetector, KDEDetector, KNNDetector, LMDDDetector, LOCIDetector, LODADetector, LOFDetector, MCDDetector, OCSVMDetector, PCADetector, RODDetector, SODDetector, SOSDetector | high
ParallelKMeans.jl | - | KMeans | experimental
PartialLeastSquaresRegressor.jl | - | PLSRegressor, KPLSRegressor | experimental
ScikitLearn.jl | MLJScikitLearnInterface.jl | ARDRegressor, AdaBoostClassifier, AdaBoostRegressor, AffinityPropagation, AgglomerativeClustering, BaggingClassifier, BaggingRegressor, BayesianLDA, BayesianQDA, BayesianRidgeRegressor, BernoulliNBClassifier, Birch, ComplementNBClassifier, DBSCAN, DummyClassifier, DummyRegressor, ElasticNetCVRegressor, ElasticNetRegressor, ExtraTreesClassifier, ExtraTreesRegressor, FeatureAgglomeration, GaussianNBClassifier, GaussianProcessClassifier, GaussianProcessRegressor, GradientBoostingClassifier, GradientBoostingRegressor, HuberRegressor, KMeans, KNeighborsClassifier, KNeighborsRegressor, LarsCVRegressor, LarsRegressor, LassoCVRegressor, LassoLarsCVRegressor, LassoLarsICRegressor, LassoLarsRegressor, LassoRegressor, LinearRegressor, LogisticCVClassifier, LogisticClassifier, MeanShift, MiniBatchKMeans, MultiTaskElasticNetCVRegressor, MultiTaskElasticNetRegressor, MultiTaskLassoCVRegressor, MultiTaskLassoRegressor, MultinomialNBClassifier, OPTICS, OrthogonalMatchingPursuitCVRegressor, OrthogonalMatchingPursuitRegressor, PassiveAggressiveClassifier, PassiveAggressiveRegressor, PerceptronClassifier, ProbabilisticSGDClassifier, RANSACRegressor, RandomForestClassifier, RandomForestRegressor, RidgeCVClassifier, RidgeCVRegressor, RidgeClassifier, RidgeRegressor, SGDClassifier, SGDRegressor, SVMClassifier, SVMLClassifier, SVMLRegressor, SVMNuClassifier, SVMNuRegressor, SVMRegressor, SpectralClustering, TheilSenRegressor | high²
SIRUS.jl | - | StableForestClassifier, StableForestRegressor, StableRulesClassifier, StableRulesRegressor | low
SymbolicRegression.jl | - | MultitargetSRRegressor, SRRegressor | experimental
TSVD.jl | MLJTSVDInterface.jl | TSVDTransformer | high
XGBoost.jl | MLJXGBoostInterface.jl | XGBoostRegressor, XGBoostClassifier, XGBoostCount | high

      Notes

      ¹Models not in the MLJ registry are not included in integration tests. Consult package documentation to see how to load them. There may be issues loading these models simultaneously with other registered models.

      ²Some models are missing and assistance is welcome to complete the interface. Post a message on the Julia #mlj Slack channel if you would like to help, thanks!

      diff --git a/v0.20.3/loading_model_code/index.html b/v0.20.3/loading_model_code/index.html new file mode 100644 index 000000000..4e874909a --- /dev/null +++ b/v0.20.3/loading_model_code/index.html @@ -0,0 +1,12 @@ + +Loading Model Code · MLJ

      Loading Model Code

      Once the name of a model, and the package providing that model, have been identified (see Model Search) one can either import the model type interactively with @iload, as shown under Installation, or use @load as shown below. The @load macro works from within a module, a package or a function, provided the relevant package providing the MLJ interface has been added to your package environment. It will attempt to load the model type into the global namespace of the module in which @load is invoked (Main if invoked at the REPL).

      In general, the code providing core functionality for the model (living in a package you should consult for documentation) may be different from the package providing the MLJ interface. Since the core package is a dependency of the interface package, only the interface package needs to be added to your environment.

      For instance, suppose you have activated a Julia package environment my_env that you wish to use for your MLJ project; for example, you have run:

      using Pkg
      +Pkg.activate("my_env", shared=true)

      Furthermore, suppose you want to use DecisionTreeClassifier, provided by the DecisionTree.jl package. Then, to determine which package provides the MLJ interface you call load_path:

      julia> load_path("DecisionTreeClassifier", pkg="DecisionTree")
      +"MLJDecisionTreeInterface.DecisionTreeClassifier"

      In this case, we see that the package required is MLJDecisionTreeInterface.jl. If this package is not in my_env (do Pkg.status() to check) you add it by running

      julia> Pkg.add("MLJDecisionTreeInterface");

      So long as my_env is the active environment, this action need never be repeated (unless you run Pkg.rm("MLJDecisionTreeInterface")). You are now ready to instantiate a decision tree classifier:

      julia> Tree = @load DecisionTreeClassifier pkg=DecisionTree
      +julia> tree = Tree()

      which is equivalent to

      julia> import MLJDecisionTreeInterface.DecisionTreeClassifier
      +julia> Tree = MLJDecisionTreeInterface.DecisionTreeClassifier
      +julia> tree = Tree()

      Tip. The specification pkg=... above can be dropped for the many models that are provided by only a single package.

      API

      StatisticalTraits.load_pathFunction
      load_path(model_name::String, pkg=nothing)

      Return the load path for the model type with name model_name, specifying the algorithm-providing package name pkg to resolve name conflicts, if necessary.

      load_path(proxy::NamedTuple)

      Return the load path for the model whose name is proxy.name and whose algorithm-providing package has name proxy.package_name. For example, proxy could be any element of the vector returned by models().

      load_path(model)

      Return the load path of a model instance or type. Usually requires necessary model code to have been separately loaded. Supply strings as above if code is not loaded.

      source
      MLJModels.@loadMacro
      @load ModelName pkg=nothing verbosity=0 add=false

      Import the model type for the model named in the first argument into the calling module, specifying pkg in the case of an ambiguous name (i.e., when more than one package provides a model type with that name). Returns the model type.

      Warning In older versions of MLJ/MLJModels, @load returned an instance instead.

      To automatically add required interface packages to the current environment, specify add=true. For interactive loading, use @iload instead.

      Examples

      Tree = @load DecisionTreeRegressor
      +tree = Tree()
      +tree2 = Tree(min_samples_split=6)
      +
      +SVM = @load SVC pkg=LIBSVM
      +svm = SVM()

      See also @iload

      source
      MLJModels.@iloadMacro
      @iload ModelName

      Interactive alternative to @load. Provides the user with an option to install (add) the required interface package to the current environment, and to choose the relevant model-providing package in ambiguous cases. See @load.

      source
      diff --git a/v0.20.3/logging_workflows/index.html b/v0.20.3/logging_workflows/index.html new file mode 100644 index 000000000..16aaad88a --- /dev/null +++ b/v0.20.3/logging_workflows/index.html @@ -0,0 +1,2 @@ + +Logging Workflows · MLJ

      Logging Workflows

      MLflow integration

      MLflow is a popular, language-agnostic, tool for externally logging the outcomes of machine learning experiments, including those carried out using MLJ.

      MLJ logging examples are given in the MLJFlow.jl documentation. MLJ includes and re-exports all the methods of MLJFlow.jl, so there is no need to import MLJFlow.jl if using MLJ.

      Warning

      MLJFlow.jl is a new package still under active development and should be regarded as experimental. At this time, breaking changes to MLJFlow.jl will not necessarily trigger new breaking releases of MLJ.jl.

      diff --git a/v0.20.3/machines/index.html b/v0.20.3/machines/index.html new file mode 100644 index 000000000..338ca94cb --- /dev/null +++ b/v0.20.3/machines/index.html @@ -0,0 +1,116 @@ + +Machines · MLJ

      Machines

      Recall from Getting Started that a machine binds a model (i.e., a choice of algorithm + hyperparameters) to data (see more at Constructing machines below). A machine is also the object storing learned parameters. Under the hood, calling fit! on a machine calls either MLJBase.fit or MLJBase.update, depending on the machine's internal state (as recorded in private fields old_model and old_rows). These lower-level fit and update methods, which are not ordinarily called directly by the user, dispatch on the model and a view of the data defined by the optional rows keyword argument of fit! (all rows by default).

      Warm restarts

      If a model update method has been implemented for the model, calls to fit! will avoid redundant calculations for certain kinds of model mutations. The main use-case is increasing an iteration parameter, such as the number of epochs in a neural network. To test whether SomeIterativeModel supports this feature, check that iteration_parameter(SomeIterativeModel) is different from nothing.

      tree = (@load DecisionTreeClassifier pkg=DecisionTree verbosity=0)()
      +forest = EnsembleModel(model=tree, n=10);
      +X, y = @load_iris;
      +mach = machine(forest, X, y)
      +fit!(mach, verbosity=2);
      trained Machine; caches model-specific representations of data
      +  model: ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …)
      +  args: 
      +    1:	Source @925 ⏎ Table{AbstractVector{Continuous}}
      +    2:	Source @787 ⏎ AbstractVector{Multiclass{3}}
      +
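
      For a quick check that forest, defined above, supports this warm-restart behaviour (a sketch only; it assumes the ensemble wrapper declares the iteration_parameter trait):

      iteration_parameter(forest)  # expected to return :n; returns nothing if warm restarts are unsupported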

      Generally, changing a hyperparameter triggers retraining on calls to subsequent fit!:

      julia> forest.bagging_fraction=0.5
      +0.5
      julia> fit!(mach, verbosity=2);
      +[ Info: Updating machine(ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), …).
      +[ Info: Truncating existing ensemble.

      However, for this iterative model, increasing the iteration parameter only adds models to the existing ensemble:

      julia> forest.n=15
      +15
      julia> fit!(mach, verbosity=2);
      +[ Info: Updating machine(ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), …).
      +[ Info: Building on existing ensemble of length 10
      +[ Info: One hash per new atom trained:
      +#####

      Call fit! again without making a change and no retraining occurs:

      julia> fit!(mach);
      +[ Info: Not retraining machine(ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), …). Use `force=true` to force.

      However, retraining can be forced:

      julia> fit!(mach, force=true);
      +[ Info: Training machine(ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), …).

      And is re-triggered if the view of the data changes:

      julia> fit!(mach, rows=1:100);
      +[ Info: Training machine(ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), …).
      julia> fit!(mach, rows=1:100);
      +[ Info: Not retraining machine(ProbabilisticEnsembleModel(model = DecisionTreeClassifier(max_depth = -1, …), …), …). Use `force=true` to force.

      If an iterative model exposes its iteration parameter as a hyperparameter, and it implements the warm restart behavior above, then it can be wrapped in a "control strategy", like an early stopping criterion. See Controlling Iterative Models for details.

      Inspecting machines

      There are two principal methods for inspecting the outcomes of training in MLJ. To obtain a named-tuple describing the learned parameters (in a user-friendly way where possible) use fitted_params(mach). All other training-related outcomes are inspected with report(mach).

      X, y = @load_iris
      +pca = (@load PCA verbosity=0)()
      +mach = machine(pca, X)
      +fit!(mach)
      trained Machine; caches model-specific representations of data
      +  model: PCA(maxoutdim = 0, …)
      +  args: 
      +    1:	Source @618 ⏎ Table{AbstractVector{Continuous}}
      +
      julia> fitted_params(mach)
      +(projection = [-0.36158967738145 0.6565398832858296 0.5809972798276162; 0.08226888989221415 0.7297123713264985 -0.5964180879380994; -0.8565721052905275 -0.175767403428653 -0.07252407548695988; -0.3588439262482158 -0.07470647013503479 -0.5490609107266099],)
      julia> report(mach)
      +(indim = 4,
      + outdim = 3,
      + tprincipalvar = 4.545608248041779,
      + tresidualvar = 0.023683027126000233,
      + tvar = 4.569291275167779,
      + mean = [5.843333333333334, 3.0540000000000003, 3.758666666666667, 1.198666666666667],
      + principalvars = [4.224840768320109, 0.24224357162751498, 0.0785239080941545],
      + loadings = [-0.7432265175592332 0.3231374133069471 0.16280774164399525; 0.16909891062391016 0.3591516283038468 -0.16712897864451629; -1.7606340630732822 -0.0865096325959021 -0.02032278180089568; -0.73758278605778 -0.03676921407410996 -0.15385849470227703],)
      MLJModelInterface.fitted_paramsMethod
      fitted_params(mach)

      Return the learned parameters for a machine mach that has been fit!, for example the coefficients in a linear model.

      This is a named tuple and human-readable if possible.

      If mach is a machine for a composite model, such as a model constructed using the pipeline syntax model1 |> model2 |> ..., then the returned named tuple has the composite type's field names as keys. The corresponding value is the fitted parameters for the machine in the underlying learning network bound to that model. (If multiple machines share the same model, then the value is a vector.)

      using MLJ
      +@load LogisticClassifier pkg=MLJLinearModels
      +X, y = @load_crabs;
      +pipe = Standardizer() |> LogisticClassifier()
      +mach = machine(pipe, X, y) |> fit!
      +
      +julia> fitted_params(mach).logistic_classifier
      +(classes = CategoricalArrays.CategoricalValue{String,UInt32}["B", "O"],
      + coefs = Pair{Symbol,Float64}[:FL => 3.7095037897680405, :RW => 0.1135739140854546, :CL => -1.6036892745322038, :CW => -4.415667573486482, :BD => 3.238476051092471],
      + intercept = 0.0883301599726305,)

      Additional keys, machines and fitted_params_given_machine, give a list of all machines in the underlying network, and a dictionary of fitted parameters keyed on those machines.

      See also report

      source
      MLJBase.reportMethod
      report(mach)

      Return the report for a machine mach that has been fit!, for example the coefficients in a linear model.

      This is a named tuple and human-readable if possible.

      If mach is a machine for a composite model, such as a model constructed using the pipeline syntax model1 |> model2 |> ..., then the returned named tuple has the composite type's field names as keys. The corresponding value is the report for the machine in the underlying learning network bound to that model. (If multiple machines share the same model, then the value is a vector.)

      using MLJ
      +@load LinearBinaryClassifier pkg=GLM
      +X, y = @load_crabs;
      +pipe = Standardizer() |> LinearBinaryClassifier()
      +mach = machine(pipe, X, y) |> fit!
      +
      +julia> report(mach).linear_binary_classifier
      +(deviance = 3.8893386087844543e-7,
      + dof_residual = 195.0,
      + stderror = [18954.83496713119, 6502.845740757159, 48484.240246060406, 34971.131004997274, 20654.82322484894, 2111.1294584763386],
      + vcov = [3.592857686311793e8 9.122732393971942e6 … -8.454645589364915e7 5.38856837634321e6; 9.122732393971942e6 4.228700272808351e7 … -4.978433790526467e7 -8.442545425533723e6; … ; -8.454645589364915e7 -4.978433790526467e7 … 4.2662172244975924e8 2.1799125705781363e7; 5.38856837634321e6 -8.442545425533723e6 … 2.1799125705781363e7 4.456867590446599e6],)
      +

      Additional keys, machines and report_given_machine, give a list of all machines in the underlying network, and a dictionary of reports keyed on those machines.

      See also fitted_params

      source

      Training losses and feature importances

      Training losses and feature importances, if reported by a model, will be available in the machine's report (see above). However, there are also direct access methods where supported:

      training_losses(mach::Machine) -> vector_of_losses

      Here vector_of_losses will be in historical order (most recent loss last). This kind of access is supported for model = mach.model if supports_training_losses(model) == true.

      feature_importances(mach::Machine) -> vector_of_pairs

      Here a vector_of_pairs is a vector of elements of the form feature => importance_value, where feature is a symbol. For example, vector_of_pairs = [:gender => 0.23, :height => 0.7, :weight => 0.1]. If a model does not support feature importances for some model hyperparameters, every importance_value will be zero. This kind of access is supported for model = mach.model if reports_feature_importances(model) == true.

      If a model can report multiple types of feature importances, then there will be a model hyper-parameter controlling the active type.
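
      As a minimal sketch of the direct-access pattern (it assumes the DecisionTree.jl classifier reports feature importances, as its traits indicate; if it did not, the accessor below would not apply):

      using MLJ
      +Tree = @load DecisionTreeClassifier pkg=DecisionTree verbosity=0
      +X, y = @load_iris
      +mach = fit!(machine(Tree(), X, y), verbosity=0)
      +reports_feature_importances(Tree())  # check the trait first
      +feature_importances(mach)            # vector of feature => importance pairs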

      Constructing machines

      A machine is constructed with the syntax machine(model, args...) where the possibilities for args (called training arguments) are summarized in the table below. Here X and y represent inputs and target, respectively, and Xout is the output of a transform call. Machines for supervised models may have additional training arguments, such as a vector of per-observation weights (in which case supports_weights(model) == true).

      model supertype | machine constructor calls | operation calls (first compulsory)
      Deterministic <: Supervised | machine(model, X, y, extras...) | predict(mach, Xnew), transform(mach, Xnew), inverse_transform(mach, Xout)
      Probabilistic <: Supervised | machine(model, X, y, extras...) | predict(mach, Xnew), predict_mean(mach, Xnew), predict_median(mach, Xnew), predict_mode(mach, Xnew), transform(mach, Xnew), inverse_transform(mach, Xout)
      Unsupervised (except Static) | machine(model, X) | transform(mach, Xnew), inverse_transform(mach, Xout), predict(mach, Xnew)
      Static | machine(model) | transform(mach, Xnews...), inverse_transform(mach, Xout)

      All operations on machines (predict, transform, etc) have exactly one argument (Xnew or Xout above) after mach, the machine instance. An exception is a machine bound to a Static model, which can have any number of arguments after mach. For more on Static transformers (which have no training arguments) see Static transformers.
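
      For instance, a weighted supervised machine might be constructed as in the following sketch (SomeWeightedModel is a made-up placeholder for any model with supports_weights(model) == true; X, y and w stand for your features, target and weights):

      w = rand(length(y))             # hypothetical per-observation weights
      +model = SomeWeightedModel()     # placeholder; check supports_weights(model) == true first
      +mach = machine(model, X, y, w)  # weights supplied as an extra training argument
      +fit!(mach)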

      A machine is reconstructed from a file using the syntax machine("my_machine.jls"), or machine("my_machine.jls", args...) if retraining using new data. See Saving machines below.

      Lowering memory demands

      For large data sets, you may be able to save memory by suppressing data caching that some models perform to increase speed. To do this, specify cache=false, as in

      machine(model, X, y, cache=false)

      Constructing machines in learning networks

      Instead of data X, y, etc, the machine constructor is provided Node or Source objects ("dynamic data") when building a learning network. See Learning Networks for more on this advanced feature.

      Saving machines

      Users can save and restore MLJ machines using any external serialization package by suitably preparing their Machine object, and applying a post-processing step to the deserialized object. This is explained under Using an arbitrary serializer below.

      However, if a user is happy to use Julia's standard library Serialization module, there is a simplified workflow described first.

      The usual serialization provisos apply. For example, when deserializing you need to have all code on which the serialization object depended loaded at the time of deserialization also. If a hyper-parameter happens to be a user-defined function, then that function must be defined at deserialization. And you should only deserialize objects from trusted sources.

      Using Julia's native serializer

      MLJModelInterface.saveFunction
      MLJ.save(filename, mach::Machine)
      +MLJ.save(io, mach::Machine)
      +
      +MLJBase.save(filename, mach::Machine)
      +MLJBase.save(io, mach::Machine)

      Serialize the machine mach to a file with path filename, or to an input/output stream io (at least IOBuffer instances are supported) using the Serialization module.

      To serialise using a different format, see serializable.

      Machines are deserialized using the machine constructor as shown in the example below.

      The implementation of save for machines changed in MLJ 0.18 (MLJBase 0.20). You can only restore a machine saved using older versions of MLJ using an older version.

      Example

      using MLJ
      +Tree = @load DecisionTreeClassifier
      +X, y = @load_iris
      +mach = fit!(machine(Tree(), X, y))
      +
      +MLJ.save("tree.jls", mach)
      +mach_predict_only = machine("tree.jls")
      +predict(mach_predict_only, X)
      +
      +# using a buffer:
      +io = IOBuffer()
      +MLJ.save(io, mach)
      +seekstart(io)
      +predict_only_mach = machine(io)
      +predict(predict_only_mach, X)
      Only load files from trusted sources

      Maliciously constructed JLS files, like pickles and most other general-purpose serialization formats, can allow for arbitrary code execution during loading. This means it is possible for someone to use a JLS file that looks like a serialized MLJ machine as a Trojan horse.

      See also serializable, machine.

      source

      Using an arbitrary serializer

      Since machines contain training data, serializing a machine directly is not recommended. Also, the learned parameters of models implemented in a language other than Julia may not have persistent representations, which means serializing them is useless. To address these two issues, users:

      • Call serializable(mach) on a machine mach they wish to save (to remove data and create persistent learned parameters)

      • Serialize the returned object using SomeSerializationPkg

      To restore the original machine (minus training data) they:

      • Deserialize using SomeSerializationPkg to obtain a new object mach
      • Call restore!(mach) to ensure mach can be used to predict or transform new data.
      MLJBase.serializableFunction
      serializable(mach::Machine)

      Returns a shallow copy of the machine to make it serializable. In particular, all training data is removed and, if necessary, learned parameters are replaced with persistent representations.

      Any general purpose Julia serializer may be applied to the output of serializable (eg, JLSO, BSON, JLD) but you must call restore!(mach) on the deserialised object mach before using it. See the example below.

      If using Julia's standard Serialization library, a shorter workflow is available using the MLJBase.save (or MLJ.save) method.

      A machine returned by serializable is characterized by the property mach.state == -1.

      Example using JLSO

      using MLJ
      +using JLSO
      +Tree = @load DecisionTreeClassifier
      +tree = Tree()
      +X, y = @load_iris
      +mach = fit!(machine(tree, X, y))
      +
      +# This machine can now be serialized
      +smach = serializable(mach)
      +JLSO.save("machine.jlso", :machine => smach)
      +
      +# Deserialize and restore learned parameters to useable form:
      +loaded_mach = JLSO.load("machine.jlso")[:machine]
      +restore!(loaded_mach)
      +
      +predict(loaded_mach, X)
      +predict(mach, X)

      See also restore!, MLJBase.save.

      source
      MLJBase.restore!Function
      restore!(mach::Machine)

      Restore the state of a machine that is currently serializable but which may not be otherwise usable. For such a machine, mach, one has mach.state == -1. Intended for restoring deserialized machine objects to a useable form.

      For an example see serializable.

      source

      Internals

      For a supervised machine, the predict method calls a lower-level MLJBase.predict method, dispatched on the underlying model and the fitresult (see below). To see predict in action, as well as its unsupervised cousins transform and inverse_transform, see Getting Started.

      A Machine instance has several fields which, apart from model, the user should not directly access; these include:

      • model - the struct containing the hyperparameters to be used in calls to fit!

      • fitresult - the learned parameters in a raw form, initially undefined

      • args - a tuple of the data, each element wrapped in a source node; see Learning Networks (in the supervised learning example above, args = (source(X), source(y)))

      • report - outputs of training not encoded in fitresult (eg, feature rankings), initially undefined

      • old_model - a deep copy of the model used in the last call to fit!

      • old_rows - a copy of the row indices used in the last call to fit!

      • cache

      The interested reader can learn more about machine internals by examining the simplified code excerpt in Internals.

      API Reference

      MLJBase.machineFunction
      machine(model, args...; cache=true, scitype_check_level=1)

      Construct a Machine object binding a model, storing hyper-parameters of some machine learning algorithm, to some data, args. Calling fit! on a Machine instance mach stores outcomes of applying the algorithm in mach, which can be inspected using fitted_params(mach) (learned parameters) and report(mach) (other outcomes). This in turn enables generalization to new data using operations such as predict or transform:

      using MLJModels
      +X, y = make_regression()
      +
      +PCA = @load PCA pkg=MultivariateStats
      +model = PCA()
      +mach = machine(model, X)
      +fit!(mach, rows=1:50)
      +transform(mach, selectrows(X, 51:100)) # or transform(mach, rows=51:100)
      +
      +DecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree
      +model = DecisionTreeRegressor()
      +mach = machine(model, X, y)
      +fit!(mach, rows=1:50)
      +predict(mach, selectrows(X, 51:100)) # or predict(mach, rows=51:100)

      Specify cache=false to prioritize memory management over speed.

      When building a learning network, Node objects can be substituted for the concrete data but no type or dimension checks are applied.

      Checks on the types of training data

      A model articulates its data requirements using scientific types, i.e., using the scitype function instead of the typeof function.

      If scitype_check_level > 0 then the scitype of each arg in args is computed, and this is compared with the scitypes expected by the model, unless args contains Unknown scitypes and scitype_check_level < 4, in which case no further action is taken. Whether warnings are issued or errors thrown depends on the level. For details, see default_scitype_check_level, a method to inspect or change the default level (1 at startup).
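
      For example (a sketch; model, X and y stand for whatever you are working with), the checks can be silenced for a single machine by passing the keyword explicitly:

      mach = machine(model, X, y, scitype_check_level=0)  # no scitype checks for this machine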

      Machines with model placeholders

      A symbol can be substituted for a model in machine constructors to act as a placeholder for a model specified at training time. The symbol must be the field name for a struct whose corresponding value is a model, as shown in the following example:

      mutable struct MyComposite
      +    transformer
      +    classifier
      +end
      +
      +my_composite = MyComposite(Standardizer(), ConstantClassifier())
      +
      +X, y = make_blobs()
      +mach = machine(:classifier, X, y)
      +fit!(mach, composite=my_composite)

      The last two lines are equivalent to

      mach = machine(ConstantClassifier(), X, y)
      +fit!(mach)

      Delaying model specification is used when exporting learning networks as new stand-alone model types. See prefit and the MLJ documentation on learning networks.

      See also fit!, default_scitype_check_level, MLJBase.save, serializable.

      source
      StatsAPI.fit!Function
      fit!(mach::Machine, rows=nothing, verbosity=1, force=false, composite=nothing)

      Fit the machine mach. In the case that mach has Node arguments, first train all other machines on which mach depends.

      To attempt to fit a machine without touching any other machine, use fit_only!. For more on options and the internal logic of fitting, see fit_only!.

      source
      fit!(N::Node;
      +     rows=nothing,
      +     verbosity=1,
      +     force=false,
      +     acceleration=CPU1())

      Train all machines required to call the node N, in an appropriate order, but parallelizing where possible using specified acceleration mode. These machines are those returned by machines(N).

      Supported modes of acceleration: CPU1(), CPUThreads().

      source
      MLJBase.fit_only!Function
      MLJBase.fit_only!(
      +    mach::Machine;
      +    rows=nothing,
      +    verbosity=1,
      +    force=false,
      +    composite=nothing,
      +)

      Without mutating any other machine on which it may depend, perform one of the following actions to the machine mach, using the data and model bound to it, and restricting the data to rows if specified:

      • Ab initio training. Ignoring any previous learned parameters and cache, compute and store new learned parameters. Increment mach.state.

      • Training update. Making use of previous learned parameters and/or cache, replace or mutate existing learned parameters. The effect is the same (or nearly the same) as in ab initio training, but may be faster or use less memory, assuming the model supports an update option (implements MLJBase.update). Increment mach.state.

      • No-operation. Leave existing learned parameters untouched. Do not increment mach.state.

      If the model, model, bound to mach is a symbol, then instead perform the action using the true model given by getproperty(composite, model). See also machine.

      Training action logic

      For the action to be a no-operation, either mach.frozen == true or none of the following apply:

      • (i) mach has never been trained (mach.state == 0).

      • (ii) force == true.

      • (iii) The state of some other machine on which mach depends has changed since the last time mach was trained (i.e., since mach.state was last incremented).

      • (iv) The specified rows have changed since the last retraining and mach.model does not have Static type.

      • (v) mach.model is a model and different from the last model used for training, but has the same type.

      • (vi) mach.model is a model but has a type different from the last model used for training.

      • (vii) mach.model is a symbol and (composite, mach.model) is different from the last model used for training, but has the same type.

      • (viii) mach.model is a symbol and (composite, mach.model) has a different type from the last model used for training.

      In any of the cases (i) - (iv), (vi), or (viii), mach is trained ab initio. If (v) or (vii) is true, then a training update is applied.

      To freeze or unfreeze mach, use freeze!(mach) or thaw!(mach).

      Implementation details

      The data to which a machine is bound is stored in mach.args. Each element of args is either a Node object, or, in the case that concrete data was bound to the machine, it is concrete data wrapped in a Source node. In all cases, to obtain concrete data for actual training, each argument N is called, as in N() or N(rows=rows), and either MLJBase.fit (ab initio training) or MLJBase.update (training update) is dispatched on mach.model and this data. See the "Adding models for general use" section of the MLJ documentation for more on these lower-level training methods.

      source
      diff --git a/v0.20.3/mlj_cheatsheet/index.html b/v0.20.3/mlj_cheatsheet/index.html new file mode 100644 index 000000000..f842cdc32 --- /dev/null +++ b/v0.20.3/mlj_cheatsheet/index.html @@ -0,0 +1,26 @@ + +MLJ Cheatsheet · MLJ

      MLJ Cheatsheet

      Starting an interactive MLJ session

      julia> using MLJ
      julia> MLJ_VERSION # version of MLJ for this cheatsheet
      +v"0.20.3"

      Model search and code loading

      info("PCA") retrieves registry metadata for the model called "PCA"

      info("RidgeRegressor", pkg="MultivariateStats") retrieves metadata for "RidgeRegresssor", which is provided by multiple packages

      doc("DecisionTreeClassifier", pkg="DecisionTree") retrieves the model document string for the classifier, without loading model code

      models() lists metadata of every registered model.

      models("Tree") lists models with "Tree" in the model or package name.

      models(x -> x.is_supervised && x.is_pure_julia) lists all supervised models written in pure julia.

      models(matching(X)) lists all unsupervised models compatible with input X.

      models(matching(X, y)) lists all supervised models compatible with input/target X/y.

      With additional conditions:

      models() do model
      +    matching(model, X, y) &&
      +    model.prediction_type == :probabilistic &&
      +        model.is_pure_julia
      +end

      Tree = @load DecisionTreeClassifier pkg=DecisionTree imports the "DecisionTreeClassifier" type and binds it to Tree; tree = Tree() then instantiates a tree.

      tree2 = Tree(max_depth=2) instantiates a tree with a different hyperparameter value

      Ridge = @load RidgeRegressor pkg=MultivariateStats imports a type for a model provided by multiple packages

      For interactive loading instead, use @iload

      Scitypes and coercion

      scitype(x) is the scientific type of x. For example scitype(2.4) == Continuous

      [Figure: scitypes_small.png — common scalar scitypes]

      type | scitype
      AbstractFloat | Continuous
      Integer | Count
      CategoricalValue and CategoricalString | Multiclass or OrderedFactor
      AbstractString | Textual

      Figure and Table for common scalar scitypes

      Use schema(X) to get the column scitypes of a table X

      coerce(y, Multiclass) attempts coercion of all elements of y into scitype Multiclass

      coerce(X, :x1 => Continuous, :x2 => OrderedFactor) to coerce columns :x1 and :x2 of table X.

      coerce(X, Count => Continuous) to coerce all columns with Count scitype to Continuous.
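
      A small end-to-end sketch (the toy column table and its column names are made up for illustration):

      X = (x1 = [1, 2, 3], x2 = ["a", "b", "a"])                # a column table
      +schema(X)                                                 # x1 is Count, x2 is Textual
      +Xfixed = coerce(X, :x1 => Continuous, :x2 => Multiclass)
      +schema(Xfixed)                                            # x1 is Continuous, x2 is Multiclass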

      Ingesting data

      Split the table channing into target y (the :Exit column) and features X (everything else), after a seeded row shuffling:

      using RDatasets
      +channing = dataset("boot", "channing")
      +y, X =  unpack(channing, ==(:Exit); rng=123)

      Same as above but exclude :Time column from X:

      using RDatasets
      +channing = dataset("boot", "channing")
      +y, X =  unpack(channing,
      +               ==(:Exit),            # y is the :Exit column
      +               !=(:Time);            # X is the rest, except :Time
      +               rng=123)

      Splitting row indices into train/validation/test, with seeded shuffling:

      train, valid, test = partition(eachindex(y), 0.7, 0.2, rng=1234) for 70:20:10 ratio

      For a stratified split:

      train, test = partition(eachindex(y), 0.8, stratify=y)

      Split a table or matrix X, instead of indices:

      Xtrain, Xvalid, Xtest = partition(X, 0.5, 0.3, rng=123)

      Getting data from OpenML:

      table = OpenML.load(91)

      Creating synthetic classification data:

      X, y = make_blobs(100, 2) (also: make_moons, make_circles)

      Creating synthetic regression data:

      X, y = make_regression(100, 2)

      Machine construction

      Supervised case:

      model = KNNRegressor(K=1) and mach = machine(model, X, y)

      Unsupervised case:

      model = OneHotEncoder() and mach = machine(model, X)

      Fitting

      fit!(mach, rows=1:100, verbosity=1, force=false) (defaults shown)

      Prediction

      Supervised case: predict(mach, Xnew) or predict(mach, rows=1:100)

      Similarly, for probabilistic models: predict_mode, predict_mean and predict_median.

      Unsupervised case: transform(mach, rows=1:100) or inverse_transform(mach, rows), etc.

      Inspecting objects

      @more gets detail on the last object in the REPL

      params(model) gets a nested-tuple of all hyperparameters, even nested ones

      info(ConstantRegressor()), info("PCA"), info("RidgeRegressor", pkg="MultivariateStats") gets all properties (aka traits) of registered models

      info(rms) gets all properties of a performance measure

      schema(X) gets column names, types and scitypes, and nrows, of a table X

      scitype(X) gets the scientific type of X

      fitted_params(mach) gets learned parameters of the fitted machine

      report(mach) gets other training results (e.g. feature rankings)

      Saving and retrieving machines using Julia serializer

      MLJ.save("trained_for_five_days.jls", mach) to save machine mach (without data)

      predict_only_mach = machine("trained_for_five_days.jls") to deserialize.

      Performance estimation

      evaluate(model, X, y, resampling=CV(), measure=rms, operation=predict, weights=..., verbosity=1)

      evaluate!(mach, resampling=Holdout(), measure=[rms, mav], operation=predict, weights=..., verbosity=1)

      evaluate!(mach, resampling=[(fold1, fold2), (fold2, fold1)], measure=rms)

      Resampling strategies (resampling=...)

      Holdout(fraction_train=0.7, rng=1234) for simple holdout

      CV(nfolds=6, rng=1234) for cross-validation

      StratifiedCV(nfolds=6, rng=1234) for stratified cross-validation

      TimeSeriesCV(nfolds=4) for time-series cross-validation

      or a list of pairs of row indices:

      [(train1, eval1), (train2, eval2), ... (traink, evalk)]

      Tuning

      Tuning model wrapper

      tuned_model = TunedModel(model=…, tuning=RandomSearch(), resampling=Holdout(), measure=…, operation=predict, range=…)

      Ranges for tuning (range=...)

      If r = range(KNNRegressor(), :K, lower=1, upper = 20, scale=:log)

      then Grid() search uses iterator(r, 6) == [1, 2, 3, 6, 11, 20].

      lower=-Inf and upper=Inf are allowed.

      Non-numeric ranges: r = range(model, :parameter, values=…)

      Nested ranges: Use dot syntax, as in r = range(EnsembleModel(atom=tree), :(atom.max_depth), ...)

      Can specify multiple ranges, as in range=[r1, r2, r3]. For more range options do ?Grid or ?RandomSearch
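
      Putting the pieces together, a minimal self-tuning sketch (the model, data and resampling choices are arbitrary, and the variable names are made up):

      KNN = @load KNNRegressor pkg=NearestNeighborModels verbosity=0
      +X, y = make_regression(100, 3)
      +knn = KNN()
      +r = range(knn, :K, lower=1, upper=20)
      +tuned_knn = TunedModel(model=knn, tuning=Grid(resolution=10),
      +                       resampling=CV(nfolds=3), measure=rms, range=r)
      +mach = fit!(machine(tuned_knn, X, y), verbosity=0)
      +fitted_params(mach).best_model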

      Tuning strategies

      RandomSearch(rng=1234) for basic random search

      Grid(resolution=10) or Grid(goal=50) for basic grid search

      Also available: LatinHypercube, Explicit (built-in), MLJTreeParzenTuning, ParticleSwarm, AdaptiveParticleSwarm (3rd-party packages)

      Learning curves

      For generating a plot of performance against parameter specified by range:

      curve = learning_curve(mach, resolution=30, resampling=Holdout(), measure=…, operation=predict, range=…, n=1)

      curve = learning_curve(model, X, y, resolution=30, resampling=Holdout(), measure=…, operation=predict, range=…, n=1)

      If using Plots.jl:

      plot(curve.parameter_values, curve.measurements, xlab=curve.parameter_name, xscale=curve.parameter_scale)

      Controlling iterative models

      Requires: using MLJIteration

      iterated_model = IteratedModel(model=…, resampling=Holdout(), measure=…, controls=…, retrain=false)

      Controls

      Increment training: Step(n=1)

      Stopping: TimeLimit(t=0.5) (in hours), NumberLimit(n=100), NumberSinceBest(n=6), NotANumber(), Threshold(value=0.0), GL(alpha=2.0), PQ(alpha=0.75, k=5), Patience(n=5)

      Logging: Info(f=identity), Warn(f=""), Error(predicate, f="")

      Callbacks: Callback(f=mach->nothing), WithNumberDo(f=n->@info(n)), WithIterationsDo(f=i->@info("num iterations: $i")), WithLossDo(f=x->@info("loss: $x")), WithTrainingLossesDo(f=v->@info(v))

      Snapshots: Save(filename="machine.jlso")

      Wraps: MLJIteration.skip(control, predicate=1), IterationControl.with_state_do(control)
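
      A minimal sketch tying these together (the EvoTrees.jl regressor is an arbitrary choice of iterative model; MLJIteration and the EvoTrees interface are assumed to be in your environment):

      using MLJ, MLJIteration
      +EvoTree = @load EvoTreeRegressor verbosity=0
      +X, y = make_regression(200, 4)
      +iterated = IteratedModel(model=EvoTree(),
      +                         resampling=Holdout(fraction_train=0.7),
      +                         measure=rms,
      +                         controls=[Step(n=10), Patience(n=5), NumberLimit(n=50)])
      +mach = fit!(machine(iterated, X, y), verbosity=0)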

      Performance measures (metrics)

      Do measures() to get full list.

      info(rms) to list properties (aka traits) of the rms measure

      Transformers

      Built-ins include: Standardizer, OneHotEncoder, UnivariateBoxCoxTransformer, FeatureSelector, FillImputer, UnivariateDiscretizer, ContinuousEncoder, UnivariateTimeTypeToContinuous

      Externals include: PCA (in MultivariateStats), KMeans, KMedoids (in Clustering).

      models(m -> !m.is_supervised) to get full list

      Ensemble model wrapper

      EnsembleModel(atom=…, weights=Float64[], bagging_fraction=0.8, rng=GLOBAL_RNG, n=100, parallel=true, out_of_bag_measure=[])

      Target transformation wrapper

      TransformedTargetModel(model=ConstantClassifier(), target=Standardizer())

      Pipelines

      pipe = (X -> coerce(X, :height=>Continuous)) |> OneHotEncoder |> KNNRegressor(K=3)

      Unsupervised:

      pipe = Standardizer |> OneHotEncoder

      Concatenation:

      pipe1 |> pipe2 or model |> pipe or pipe |> model, etc
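
      For instance, a minimal supervised pipeline sketch (data and model choices are arbitrary):

      KNN = @load KNNRegressor pkg=NearestNeighborModels verbosity=0
      +X, y = make_regression(100, 3)
      +pipe = Standardizer() |> KNN(K=3)
      +mach = fit!(machine(pipe, X, y), verbosity=0)
      +predict(mach, X)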

      Define a supervised learning network:

      Xs = source(X)
      +ys = source(y)

      ... define further nodal machines and nodes ...

      yhat = predict(knn_machine, W) (final node)
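
      As one concrete (and arbitrary) way the elided middle might look, assuming KNNRegressor and Standardizer have been loaded, a complete sketch of such a network is:

      Xs = source(X)
      +ys = source(y)
      +stand = machine(Standardizer(), Xs)
      +W = transform(stand, Xs)
      +knn_machine = machine(KNNRegressor(K=3), W, ys)
      +yhat = predict(knn_machine, W)
      +fit!(yhat)    # trains every machine the node yhat depends on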

      Exporting a learning network as a stand-alone model:

      Supervised, with final node yhat returning point predictions:

      @from_network machine(Deterministic(), Xs, ys; predict=yhat) begin
      +    mutable struct Composite
      +	    reducer=network_pca
      +		regressor=network_knn
      +    end
      +end

      Here network_pca and network_knn are models appearing in the learning network.

      Supervised, with yhat final node returning probabilistic predictions:

      @from_network machine(Probabilistic(), Xs, ys; predict=yhat) begin
      +    mutable struct Composite
      +        reducer=network_pca
      +        classifier=network_tree
      +    end
      +end

      Unsupervised, with final node Xout:

      @from_network machine(Unsupervised(), Xs; transform=Xout) begin
      +    mutable struct Composite
      +	    reducer1=network_pca
      +		reducer2=clusterer
      +    end
      +end


      diff --git a/v0.20.3/model_browser/index.html b/v0.20.3/model_browser/index.html new file mode 100644 index 000000000..8733d30d4 --- /dev/null +++ b/v0.20.3/model_browser/index.html @@ -0,0 +1,2 @@ + +Model Browser · MLJ

      Model Browser

      Models may appear under multiple categories.

      Below an encoder is any transformer that does not fall under another category, such as "Missing Value Imputation" or "Dimension Reduction".

      Categories

      Regression | Classification | Outlier Detection | Iterative Models | Ensemble Models | Clustering | Dimension Reduction | Bayesian Models | Class Imbalance | Encoders | Static Models | Missing Value Imputation | Distribution Fitter | Text Analysis | Image Processing

      Regression

      Classification

      Outlier Detection

      Iterative Models

      Ensemble Models

      Clustering

      Dimension Reduction

      Bayesian Models

      Class Imbalance

      Encoders

      Static Models

      Missing Value Imputation

      Distribution Fitter

      Text Analysis

      Image Processing

      diff --git a/v0.20.3/model_search/index.html b/v0.20.3/model_search/index.html new file mode 100644 index 000000000..abc0f3133 --- /dev/null +++ b/v0.20.3/model_search/index.html @@ -0,0 +1,129 @@ + +Model Search · MLJ

      Model Search

      MLJ has a model registry, allowing the user to search models and their properties, without loading all the packages containing model code. In turn, this allows one to efficiently find all models solving a given machine learning task. The task itself is specified with the help of the matching method, and the search executed with the models methods, as detailed below.

      For commonly encountered problems with model search, see also Preparing Data.

      A table of all models is also given at List of Supported Models.

      Model metadata

      Terminology. In this section the word "model" refers to a metadata entry in the model registry, as opposed to an actual model struct that such an entry represents. One can obtain such an entry with the info command:

      julia> info("PCA")(name = "PCA",
      + package_name = "MultivariateStats",
      + is_supervised = false,
      + abstract_type = Unsupervised,
      + deep_properties = (),
      + docstring = "```\nPCA\n```\n\nA model type for constructing a pca, ...",
      + fit_data_scitype = Tuple{Table{<:AbstractVector{<:Continuous}}},
      + human_name = "pca",
      + hyperparameter_ranges = (nothing, nothing, nothing, nothing),
      + hyperparameter_types =
      +     ("Int64", "Symbol", "Float64", "Union{Nothing, Real, Vector{Float64}}"),
      + hyperparameters = (:maxoutdim, :method, :variance_ratio, :mean),
      + implemented_methods =
      +     [:clean!, :fit, :fitted_params, :inverse_transform, :transform],
      + inverse_transform_scitype = Table{<:AbstractVector{<:Continuous}},
      + is_pure_julia = true,
      + is_wrapper = false,
      + iteration_parameter = nothing,
      + load_path = "MLJMultivariateStatsInterface.PCA",
      + package_license = "MIT",
      + package_url = "https://github.com/JuliaStats/MultivariateStats.jl",
      + package_uuid = "6f286f6a-111f-5878-ab1e-185364afe411",
      + predict_scitype = Unknown,
      + prediction_type = :unknown,
      + reporting_operations = (),
      + reports_feature_importances = false,
      + supports_class_weights = false,
      + supports_online = false,
      + supports_training_losses = false,
      + supports_weights = false,
      + transform_scitype = Table{<:AbstractVector{<:Continuous}},
      + input_scitype = Table{<:AbstractVector{<:Continuous}},
      + target_scitype = Unknown,
      + output_scitype = Table{<:AbstractVector{<:Continuous}})

      So a "model" in the present context is just a named tuple containing metadata, and not an actual model type or instance. If two models with the same name occur in different packages, the package name must be specified, as in info("LinearRegressor", pkg="GLM").

      Model document strings can be retrieved, without importing the defining code, using the doc function:

      doc("DecisionTreeClassifier", pkg="DecisionTree")

      General model queries

      We list all models (named tuples) using models(), and list the models for which code is already loaded with localmodels():

      julia> localmodels()
      +59-element Vector{NamedTuple{(:name, :package_name, :is_supervised, :abstract_type, :deep_properties, :docstring, :fit_data_scitype, :human_name, :hyperparameter_ranges, :hyperparameter_types, :hyperparameters, :implemented_methods, :inverse_transform_scitype, :is_pure_julia, :is_wrapper, :iteration_parameter, :load_path, :package_license, :package_url, :package_uuid, :predict_scitype, :prediction_type, :reporting_operations, :reports_feature_importances, :supports_class_weights, :supports_online, :supports_training_losses, :supports_weights, :transform_scitype, :input_scitype, :target_scitype, :output_scitype)}}:
      + (name = AdaBoostStumpClassifier, package_name = DecisionTree, ... )
      + (name = BayesianLDA, package_name = MultivariateStats, ... )
      + (name = BayesianSubspaceLDA, package_name = MultivariateStats, ... )
      + (name = ConstantClassifier, package_name = MLJModels, ... )
      + (name = ConstantRegressor, package_name = MLJModels, ... )
      + (name = ContinuousEncoder, package_name = MLJModels, ... )
      + (name = DBSCAN, package_name = Clustering, ... )
      + (name = DecisionTreeClassifier, package_name = DecisionTree, ... )
      + (name = DecisionTreeRegressor, package_name = DecisionTree, ... )
      + (name = DeterministicConstantClassifier, package_name = MLJModels, ... )
      + ⋮
      + (name = RidgeRegressor, package_name = MultivariateStats, ... )
      + (name = RobustRegressor, package_name = MLJLinearModels, ... )
      + (name = Standardizer, package_name = MLJModels, ... )
      + (name = SubspaceLDA, package_name = MultivariateStats, ... )
      + (name = UnivariateBoxCoxTransformer, package_name = MLJModels, ... )
      + (name = UnivariateDiscretizer, package_name = MLJModels, ... )
      + (name = UnivariateFillImputer, package_name = MLJModels, ... )
      + (name = UnivariateStandardizer, package_name = MLJModels, ... )
      + (name = UnivariateTimeTypeToContinuous, package_name = MLJModels, ... )
      julia> localmodels()[2]
      +(name = "BayesianLDA",
      + package_name = "MultivariateStats",
      + is_supervised = true,
      + abstract_type = Probabilistic,
      + deep_properties = (),
      + docstring = "```\nBayesianLDA\n```\n\nA model type for constructing...",
      + fit_data_scitype =
      +     Tuple{Table{<:AbstractVector{<:Continuous}}, AbstractVector{<:Finite}},
      + human_name = "Bayesian LDA model",
      + hyperparameter_ranges =
      +     (nothing, nothing, nothing, nothing, nothing, nothing),
      + hyperparameter_types =
      +     ("Symbol",
      +      "StatsBase.CovarianceEstimator",
      +      "StatsBase.CovarianceEstimator",
      +      "Int64",
      +      "Float64",
      +      "Union{Nothing, Dict{<:Any, <:Real}, CategoricalDistributions.UnivariateFinite{<:Any, <:Any, <:Any, <:Real}}"),
      + hyperparameters = (:method, :cov_w, :cov_b, :outdim, :regcoef, :priors),
      + implemented_methods = [:clean!, :fit, :fitted_params, :predict, :transform],
      + inverse_transform_scitype = Unknown,
      + is_pure_julia = true,
      + is_wrapper = false,
      + iteration_parameter = nothing,
      + load_path = "MLJMultivariateStatsInterface.BayesianLDA",
      + package_license = "MIT",
      + package_url = "https://github.com/JuliaStats/MultivariateStats.jl",
      + package_uuid = "6f286f6a-111f-5878-ab1e-185364afe411",
      + predict_scitype =
      +     AbstractVector{ScientificTypesBase.Density{_s25} where _s25<:Finite},
      + prediction_type = :probabilistic,
      + reporting_operations = (),
      + reports_feature_importances = false,
      + supports_class_weights = false,
      + supports_online = false,
      + supports_training_losses = false,
      + supports_weights = false,
      + transform_scitype = Unknown,
      + input_scitype = Table{<:AbstractVector{<:Continuous}},
      + target_scitype = AbstractVector{<:Finite},
      + output_scitype = Table{<:AbstractVector{<:Continuous}})

      One can search for models containing specified strings or regular expressions in their docstring attributes, as in

      julia> models("forest")12-element Vector{NamedTuple{(:name, :package_name, :is_supervised, :abstract_type, :deep_properties, :docstring, :fit_data_scitype, :human_name, :hyperparameter_ranges, :hyperparameter_types, :hyperparameters, :implemented_methods, :inverse_transform_scitype, :is_pure_julia, :is_wrapper, :iteration_parameter, :load_path, :package_license, :package_url, :package_uuid, :predict_scitype, :prediction_type, :reporting_operations, :reports_feature_importances, :supports_class_weights, :supports_online, :supports_training_losses, :supports_weights, :transform_scitype, :input_scitype, :target_scitype, :output_scitype)}}:
      + (name = GeneralImputer, package_name = BetaML, ... )
      + (name = IForestDetector, package_name = OutlierDetectionPython, ... )
      + (name = RandomForestClassifier, package_name = DecisionTree, ... )
      + (name = RandomForestClassifier, package_name = MLJScikitLearnInterface, ... )
      + (name = RandomForestImputer, package_name = BetaML, ... )
      + (name = RandomForestRegressor, package_name = BetaML, ... )
      + (name = RandomForestRegressor, package_name = DecisionTree, ... )
      + (name = RandomForestRegressor, package_name = MLJScikitLearnInterface, ... )
      + (name = StableForestClassifier, package_name = SIRUS, ... )
      + (name = StableForestRegressor, package_name = SIRUS, ... )
      + (name = StableRulesClassifier, package_name = SIRUS, ... )
      + (name = StableRulesRegressor, package_name = SIRUS, ... )

      or by specifying a filter (Bool-valued function):

      julia> filter(model) = model.is_supervised &&
      +                       model.input_scitype >: MLJ.Table(Continuous) &&
      +                       model.target_scitype >: AbstractVector{<:Multiclass{3}} &&
      +                       model.prediction_type == :deterministic
      +filter (generic function with 1 method)
      julia> models(filter)
      +12-element Vector{NamedTuple{(:name, :package_name, :is_supervised, :abstract_type, :deep_properties, :docstring, :fit_data_scitype, :human_name, :hyperparameter_ranges, :hyperparameter_types, :hyperparameters, :implemented_methods, :inverse_transform_scitype, :is_pure_julia, :is_wrapper, :iteration_parameter, :load_path, :package_license, :package_url, :package_uuid, :predict_scitype, :prediction_type, :reporting_operations, :reports_feature_importances, :supports_class_weights, :supports_online, :supports_training_losses, :supports_weights, :transform_scitype, :input_scitype, :target_scitype, :output_scitype)}}:
      + (name = DeterministicConstantClassifier, package_name = MLJModels, ... )
      + (name = LinearSVC, package_name = LIBSVM, ... )
      + (name = NuSVC, package_name = LIBSVM, ... )
      + (name = PassiveAggressiveClassifier, package_name = MLJScikitLearnInterface, ... )
      + (name = PerceptronClassifier, package_name = MLJScikitLearnInterface, ... )
      + (name = RidgeCVClassifier, package_name = MLJScikitLearnInterface, ... )
      + (name = RidgeClassifier, package_name = MLJScikitLearnInterface, ... )
      + (name = SGDClassifier, package_name = MLJScikitLearnInterface, ... )
      + (name = SVC, package_name = LIBSVM, ... )
      + (name = SVMClassifier, package_name = MLJScikitLearnInterface, ... )
      + (name = SVMLinearClassifier, package_name = MLJScikitLearnInterface, ... )
      + (name = SVMNuClassifier, package_name = MLJScikitLearnInterface, ... )

      Multiple test arguments may be passed to models, which are applied conjunctively.

      Matching models to data

      Common searches are streamlined with the help of the matching command, defined as follows:

      • matching(model, X, y) == true exactly when model is supervised and admits inputs and targets with the scientific types of X and y, respectively

      • matching(model, X) == true exactly when model is unsupervised and admits inputs with the scientific types of X.

      So, to search for all supervised probabilistic models handling input X and target y, one can define the testing function task by

      task(model) = matching(model, X, y) && model.prediction_type == :probabilistic

      And execute the search with

      models(task)

      Also defined are Bool-valued callable objects matching(model), matching(X, y) and matching(X), with obvious behavior. For example, matching(X, y)(model) = matching(model, X, y).

      So, to search for all models compatible with input X and target y, for example, one executes

      models(matching(X, y))

      while the preceding search can also be written

      models() do model
      +    matching(model, X, y) &&
      +    model.prediction_type == :probabilistic
      +end

      API

      MLJModels.modelsFunction
      models()

      List all models in the MLJ registry. Here and below model means the registry metadata entry for a genuine model type (a proxy for types whose defining code may not be loaded).

      models(filters...)

      List all models m for which filter(m) is true, for each filter in filters.

      models(matching(X, y))

      List all supervised models compatible with training data X, y.

      models(matching(X))

      List all unsupervised models compatible with training data X.

      Excluded in the listings are the built-in model-wraps, like EnsembleModel, TunedModel, and IteratedModel.

      Example

      If

      task(model) = model.is_supervised && model.is_probabilistic

      then models(task) lists all supervised models making probabilistic predictions.

      See also: localmodels.

      source
      models(needle::Union{AbstractString,Regex})

      List all models whose name or docstring matches a given needle.

      source
      MLJModels.localmodelsFunction
      localmodels(; modl=Main)
      +localmodels(filters...; modl=Main)
      +localmodels(needle::Union{AbstractString,Regex}; modl=Main)

      List all models currently available to the user from the module modl without importing a package, and which additionally pass through the specified filters. Here a filter is a Bool-valued function on models.

      Use load_path to get the path to some model returned, as in these examples:

      ms = localmodels()
      +model = ms[1]
      +load_path(model)

      See also models, load_path.

      source
      diff --git a/v0.20.3/model_stacking/index.html b/v0.20.3/model_stacking/index.html new file mode 100644 index 000000000..806612db4 --- /dev/null +++ b/v0.20.3/model_stacking/index.html @@ -0,0 +1,24 @@ + +Model Stacking · MLJ

      Model Stacking

      In a model stack, as introduced by Wolpert (1992), an adjudicating model learns the best way to combine the predictions of multiple base models. In MLJ, such models are constructed using the Stack constructor. To learn more about stacking and to see how to construct a stack "by hand" using Learning Networks, see this Data Science in Julia tutorial.

      MLJBase.StackType
      Stack(; metalearner=nothing, name1=model1, name2=model2, ..., keyword_options...)

      Implements the two-layer generalized stack algorithm introduced by Wolpert (1992) and generalized by Van der Laan et al (2007). Returns an instance of type ProbabilisticStack or DeterministicStack, depending on the prediction type of metalearner.

      When training a machine bound to such an instance:

      • The data is split into training/validation sets according to the specified resampling strategy.

      • Each base model model1, model2, ... is trained on each training subset and outputs predictions on the corresponding validation sets. The multi-fold predictions are spliced together into a so-called out-of-sample prediction for each model.

      • The adjudicating model, metalearner, is subsequently trained on the out-of-sample predictions to learn the best combination of base model predictions.

      • Each base model is retrained on all supplied data, so that its predictions on new production data can be passed on to the adjudicator for making new predictions.

      Arguments

      • metalearner::Supervised: The model that will optimize the desired criterion based on its internals. For instance, a LinearRegression model will optimize the squared error.

      • resampling: The resampling strategy used to prepare out-of-sample predictions of the base learners.

      • measures: A measure or iterable over measures, to perform an internal evaluation of the learners in the Stack while training. This is not for the evaluation of the Stack itself.

      • cache: Whether machines created in the learning network will cache data or not.

      • acceleration: A supported AbstractResource to define the training parallelization mode of the stack.

      • name1=model1, name2=model2, ...: the Supervised model instances to be used as base learners. The provided names become properties of the instance created, to allow hyper-parameter access.

      Example

      The following code defines a DeterministicStack instance for learning a Continuous target, and demonstrates that:

      • Base models can be Probabilistic models even if the stack itself is Deterministic (predict_mean is applied in such cases).

      • As an alternative to hyperparameter optimization, one can stack multiple copies of a given model, mutating the hyper-parameter used in each copy.

      using MLJ
      +
      +DecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree
      +EvoTreeRegressor = @load EvoTreeRegressor
      +XGBoostRegressor = @load XGBoostRegressor
      +KNNRegressor = @load KNNRegressor pkg=NearestNeighborModels
      +LinearRegressor = @load LinearRegressor pkg=MLJLinearModels
      +
      +X, y = make_regression(500, 5)
      +
      +stack = Stack(;metalearner=LinearRegressor(),
      +                resampling=CV(),
      +                measures=rmse,
      +                constant=ConstantRegressor(),
      +                tree_2=DecisionTreeRegressor(max_depth=2),
      +                tree_3=DecisionTreeRegressor(max_depth=3),
      +                evo=EvoTreeRegressor(),
      +                knn=KNNRegressor(),
      +                xgb=XGBoostRegressor())
      +
      +mach = machine(stack, X, y)
      +evaluate!(mach; resampling=Holdout(), measure=rmse)
      +

      The internal evaluation report can be accessed like this and provides a PerformanceEvaluation object for each model:

      report(mach).cv_report
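
      Assuming, as in this sketch, that the entries are keyed on the base-model names supplied to the Stack constructor, an individual evaluation can be inspected like this:

      internal = report(mach).cv_report
      +internal.knn    ## PerformanceEvaluation for the base model named `knn` above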
      source
      diff --git a/v0.20.3/models/ABODDetector_OutlierDetectionNeighbors/index.html b/v0.20.3/models/ABODDetector_OutlierDetectionNeighbors/index.html new file mode 100644 index 000000000..ec82f9681 --- /dev/null +++ b/v0.20.3/models/ABODDetector_OutlierDetectionNeighbors/index.html @@ -0,0 +1,13 @@ + +ABODDetector · MLJ

      ABODDetector

      ABODDetector(k = 5,
      +             metric = Euclidean(),
      +             algorithm = :kdtree,
      +             static = :auto,
      +             leafsize = 10,
      +             reorder = true,
      +             parallel = false,
      +             enhanced = false)

      Determine outliers based on the angles a point forms with its nearest neighbors. This implements the FastABOD variant described in the paper; that is, it uses the variance of the angles to a point's nearest neighbors, not to the whole dataset; see [1].

      Notice: The scores are inverted, to conform to our notion that higher scores describe higher outlierness.

      Parameters

      k::Integer

      Number of neighbors (must be greater than 0).

      metric::Metric

      This is one of the Metric types defined in the Distances.jl package. It is possible to define your own metrics by creating new types that are subtypes of Metric.

      algorithm::Symbol

      One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.

      static::Union{Bool, Symbol}

      One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.

      leafsize::Int

      Determines at what number of points to stop splitting the tree further. There is a trade-off between traversing the tree and having to evaluate the metric function for increasing number of points.

      reorder::Bool

      While building the tree, this will put points that are close in distance close in memory, since this helps with cache locality. In this case, a copy of the original data will be made so that the original data is left unmodified. This can have a significant impact on performance and is set to true by default.

      parallel::Bool

      Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. Note: fit is not parallel.

      enhanced::Bool

      When enhanced=true, it uses the enhanced ABOD (EABOD) adaptation proposed by [2].

      Examples

      using OutlierDetection: ABODDetector, fit, transform
      +detector = ABODDetector()
      +X = rand(10, 100)
      +model, result = fit(detector, X; verbosity=0)
      +test_scores = transform(detector, model, X)
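
      The detector can also be used through the MLJ machine interface; a minimal sketch, assuming a table with observations as rows:

      using MLJ
      +ABODDetector = @load ABODDetector pkg=OutlierDetectionNeighbors verbosity=0
      +detector = ABODDetector(k=10)
      +X = MLJ.table(rand(100, 5))     ## 100 observations, 5 features
      +mach = machine(detector, X) |> fit!
      +scores = transform(mach, X)     ## raw outlier scores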

      References

      [1] Kriegel, Hans-Peter; Schubert, Matthias; Zimek, Arthur (2008): Angle-based outlier detection in high-dimensional data.

      [2] Li, Xiaojie; Lv, Jian Cheng; Cheng, Dongdong (2015): Angle-Based Outlier Detection Algorithm with More Stable Relationships.

      diff --git a/v0.20.3/models/ABODDetector_OutlierDetectionPython/index.html b/v0.20.3/models/ABODDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..d7ccb53be --- /dev/null +++ b/v0.20.3/models/ABODDetector_OutlierDetectionPython/index.html @@ -0,0 +1,3 @@ + +ABODDetector · MLJ diff --git a/v0.20.3/models/ARDRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/ARDRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..a992e6e75 --- /dev/null +++ b/v0.20.3/models/ARDRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +ARDRegressor · MLJ

      ARDRegressor

      ARDRegressor

      A model type for constructing a Bayesian ARD regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ARDRegressor = @load ARDRegressor pkg=MLJScikitLearnInterface

      Do model = ARDRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ARDRegressor(n_iter=...).

      Hyper-parameters

      • n_iter = 300
      • tol = 0.001
      • alpha_1 = 1.0e-6
      • alpha_2 = 1.0e-6
      • lambda_1 = 1.0e-6
      • lambda_2 = 1.0e-6
      • compute_score = false
      • threshold_lambda = 10000.0
      • fit_intercept = true
      • copy_X = true
      • verbose = false
      diff --git a/v0.20.3/models/AdaBoostClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/AdaBoostClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..d9f5a026b --- /dev/null +++ b/v0.20.3/models/AdaBoostClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +AdaBoostClassifier · MLJ

      AdaBoostClassifier

      AdaBoostClassifier

      A model type for constructing an AdaBoost classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      AdaBoostClassifier = @load AdaBoostClassifier pkg=MLJScikitLearnInterface

      Do model = AdaBoostClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in AdaBoostClassifier(estimator=...).

      An AdaBoost classifier is a meta-estimator that begins by fitting a classifier on the original dataset and then fits additional copies of the classifier on the same dataset but where the weights of incorrectly classified instances are adjusted such that subsequent classifiers focus more on difficult cases.

      This class implements the algorithm known as AdaBoost-SAMME.
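
      As an illustration only (assuming the MLJScikitLearnInterface package and its scikit-learn dependency are installed), a default instance is trained like any other MLJ classifier:

      using MLJ
      +AdaBoostClassifier = @load AdaBoostClassifier pkg=MLJScikitLearnInterface
      +X, y = @load_iris
      +mach = machine(AdaBoostClassifier(), X, y) |> fit!
      +yhat = predict(mach, X)    ## predictions on the training data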

      diff --git a/v0.20.3/models/AdaBoostRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/AdaBoostRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..64277ac94 --- /dev/null +++ b/v0.20.3/models/AdaBoostRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +AdaBoostRegressor · MLJ

      AdaBoostRegressor

      AdaBoostRegressor

      A model type for constructing an AdaBoost ensemble regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      AdaBoostRegressor = @load AdaBoostRegressor pkg=MLJScikitLearnInterface

      Do model = AdaBoostRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in AdaBoostRegressor(estimator=...).

      An AdaBoost regressor is a meta-estimator that begins by fitting a regressor on the original dataset and then fits additional copies of the regressor on the same dataset but where the weights of instances are adjusted according to the error of the current prediction. As such, subsequent regressors focus more on difficult cases.

      This class implements the algorithm known as AdaBoost.R2.

      diff --git a/v0.20.3/models/AdaBoostStumpClassifier_DecisionTree/index.html b/v0.20.3/models/AdaBoostStumpClassifier_DecisionTree/index.html new file mode 100644 index 000000000..1969a5343 --- /dev/null +++ b/v0.20.3/models/AdaBoostStumpClassifier_DecisionTree/index.html @@ -0,0 +1,19 @@ + +AdaBoostStumpClassifier · MLJ

      AdaBoostStumpClassifier

      AdaBoostStumpClassifier

      A model type for constructing an Ada-boosted stump classifier, based on DecisionTree.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      AdaBoostStumpClassifier = @load AdaBoostStumpClassifier pkg=DecisionTree

      Do model = AdaBoostStumpClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in AdaBoostStumpClassifier(n_iter=...).

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyperparameters

      • n_iter=10: number of iterations of AdaBoost
      • feature_importance: method to use for computing feature importances. One of (:impurity, :split)
      • rng=Random.GLOBAL_RNG: random number generator or seed

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. Predictions are probabilistic, but uncalibrated.
      • predict_mode(mach, Xnew): instead return the mode of each prediction above.

      Fitted Parameters

      The fields of fitted_params(mach) are:

      • stumps: the Ensemble object returned by the core DecisionTree.jl algorithm.
      • coefficients: the stump coefficients (one per stump)

      Report

      • features: the names of the features encountered in training

      Accessor functions

      • feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)

      Examples

      using MLJ
      +Booster = @load AdaBoostStumpClassifier pkg=DecisionTree
      +booster = Booster(n_iter=15)
      +
      +X, y = @load_iris
      +mach = machine(booster, X, y) |> fit!
      +
      +Xnew = (sepal_length = [6.4, 7.2, 7.4],
      +        sepal_width = [2.8, 3.0, 2.8],
      +        petal_length = [5.6, 5.8, 6.1],
      +        petal_width = [2.1, 1.6, 1.9],)
      +yhat = predict(mach, Xnew) ## probabilistic predictions
      +predict_mode(mach, Xnew)   ## point predictions
      +pdf.(yhat, "virginica")    ## probabilities for the "virginica" class
      +
      +fitted_params(mach).stumps ## raw `Ensemble` object from DecisionTree.jl
      +fitted_params(mach).coefs  ## coefficient associated with each stump
      +feature_importances(mach)

      See also DecisionTree.jl and the unwrapped model type MLJDecisionTreeInterface.DecisionTree.AdaBoostStumpClassifier.

      diff --git a/v0.20.3/models/AffinityPropagation_MLJScikitLearnInterface/index.html b/v0.20.3/models/AffinityPropagation_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..955cf5985 --- /dev/null +++ b/v0.20.3/models/AffinityPropagation_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +AffinityPropagation · MLJ

      AffinityPropagation

      AffinityPropagation

      A model type for constructing an Affinity Propagation clustering of data, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      AffinityPropagation = @load AffinityPropagation pkg=MLJScikitLearnInterface

      Do model = AffinityPropagation() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in AffinityPropagation(damping=...).

      Hyper-parameters

      • damping = 0.5
      • max_iter = 200
      • convergence_iter = 15
      • copy = true
      • preference = nothing
      • affinity = euclidean
      • verbose = false
      diff --git a/v0.20.3/models/AgglomerativeClustering_MLJScikitLearnInterface/index.html b/v0.20.3/models/AgglomerativeClustering_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..18d3bc1c4 --- /dev/null +++ b/v0.20.3/models/AgglomerativeClustering_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +AgglomerativeClustering · MLJ

      AgglomerativeClustering

      AgglomerativeClustering

      A model type for constructing an agglomerative clustering model, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      AgglomerativeClustering = @load AgglomerativeClustering pkg=MLJScikitLearnInterface

      Do model = AgglomerativeClustering() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in AgglomerativeClustering(n_clusters=...).

      Recursively merges the pair of clusters that minimally increases a given linkage distance. Note: there is no predict or transform. Instead, inspect the fitted_params.
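
      A minimal sketch of this workflow (assuming scikit-learn is installed; the fitted parameters include the cluster assignments for the training observations):

      using MLJ
      +AgglomerativeClustering = @load AgglomerativeClustering pkg=MLJScikitLearnInterface
      +X, _ = @load_iris
      +mach = machine(AgglomerativeClustering(n_clusters=3), X) |> fit!
      +fitted_params(mach)    ## inspect the fitted attributes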

      diff --git a/v0.20.3/models/AutoEncoder_BetaML/index.html b/v0.20.3/models/AutoEncoder_BetaML/index.html new file mode 100644 index 000000000..2417dd9b2 --- /dev/null +++ b/v0.20.3/models/AutoEncoder_BetaML/index.html @@ -0,0 +1,62 @@ + +AutoEncoder · MLJ

      AutoEncoder

      mutable struct AutoEncoder <: MLJModelInterface.Unsupervised

      A ready-to-use AutoEncoder from the Beta Machine Learning Toolkit (BetaML), for encoding and decoding of data using neural networks.

      Parameters:

      • e_layers: The layers (vector of AbstractLayers) responsible for the encoding of the data [def: nothing, i.e. two dense layers with the inner one of size innerdims]. See subtypes(BetaML.AbstractLayer) for supported layers

      • d_layers: The layers (vector of AbstractLayers) responsible for the decoding of the data [def: nothing, i.e. two dense layers with the inner one of size innerdims]. See subtypes(BetaML.AbstractLayer) for supported layers

      • outdims: The number of neurons (i.e. dimensions) of the encoded data. If the value is a float, it is considered a percentage (to be rounded) of the dimensionality of the data [def: 0.33]

      • innerdims: Inner layer dimension (i.e. number of neurons). If the value is a float, it is considered a percentage (to be rounded) of the dimensionality of the data [def: nothing, which applies a specific heuristic]. Consider that the underlying neural network is trying to predict multiple values at the same time. Normally this requires many more neurons than a scalar prediction. If e_layers or d_layers are specified, this parameter is ignored for the respective part.

      • loss: Loss (cost) function [def: BetaML.squared_cost]. Should always assume y and ŷ as (n x d) matrices.

        Warning

        If you change the parameter loss, you need to either provide its derivative on the parameter dloss or use autodiff with dloss=nothing.

      • dloss: Derivative of the loss function [def: BetaML.dsquared_cost if loss==squared_cost, nothing otherwise, i.e. use the derivative of the squared cost or autodiff]

      • epochs: Number of epochs, i.e. passes through the whole training sample [def: 200]

      • batch_size: Size of each individual batch [def: 8]

      • opt_alg: The optimisation algorithm to update the gradient at each batch [def: BetaML.ADAM()] See subtypes(BetaML.OptimisationAlgorithm) for supported optimizers

      • shuffle: Whether to randomly shuffle the data at each iteration (epoch) [def: true]

      • tunemethod: The method - and its parameters - to employ for hyperparameter autotuning. See SuccessiveHalvingSearch for the default method. To implement automatic hyperparameter tuning during the (first) fit! call, simply set autotune=true and optionally change the default tunemethod options (including the parameter ranges, the resources to employ and the loss function to adopt).

      • descr: An optional title and/or description for this model

      • rng: Random Number Generator (see FIXEDSEED) [default: Random.GLOBAL_RNG]

      Notes:

      • data must be numerical
      • use transform to obtain the encoded data, and inverse_transform to decode to the original data

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load AutoEncoder pkg = "BetaML" verbosity=0;
      +
      +julia> model       = modelType(outdims=2,innerdims=10);
      +
      +julia> mach        = machine(model, X)
      +untrained Machine; caches model-specific representations of data
      +  model: AutoEncoder(e_layers = nothing, …)
      +  args: 
      +    1:	Source @334 ⏎ Table{AbstractVector{Continuous}}
      +
      +julia> fit!(mach,verbosity=2)
      +[ Info: Training machine(AutoEncoder(e_layers = nothing, …), …).
      +***
      +*** Training  for 200 epochs with algorithm BetaML.Nn.ADAM.
      +Training.. 	 avg loss on epoch 1 (1): 	 35.48243542158747
      +Training.. 	 avg loss on epoch 20 (20): 	 0.07528042222678126
      +Training.. 	 avg loss on epoch 40 (40): 	 0.06293071729378613
      +Training.. 	 avg loss on epoch 60 (60): 	 0.057035588828991145
      +Training.. 	 avg loss on epoch 80 (80): 	 0.056313167754822875
      +Training.. 	 avg loss on epoch 100 (100): 	 0.055521461091809436
      +Training the Neural Network...  52%|██████████████████████████████████████                                   |  ETA: 0:00:01Training.. 	 avg loss on epoch 120 (120): 	 0.06015206472927942
      +Training.. 	 avg loss on epoch 140 (140): 	 0.05536835903285201
      +Training.. 	 avg loss on epoch 160 (160): 	 0.05877560142428245
      +Training.. 	 avg loss on epoch 180 (180): 	 0.05476302769966953
      +Training.. 	 avg loss on epoch 200 (200): 	 0.049240864053557445
      +Training the Neural Network... 100%|█████████████████████████████████████████████████████████████████████████| Time: 0:00:01
      +Training of 200 epoch completed. Final epoch error: 0.049240864053557445.
      +trained Machine; caches model-specific representations of data
      +  model: AutoEncoder(e_layers = nothing, …)
      +  args: 
      +    1:	Source @334 ⏎ Table{AbstractVector{Continuous}}
      +
      +
      +julia> X_latent    = transform(mach, X)
      +150×2 Matrix{Float64}:
      + 7.01701   -2.77285
      + 6.50615   -2.9279
      + 6.5233    -2.60754
      + ⋮        
      + 6.70196  -10.6059
      + 6.46369  -11.1117
      + 6.20212  -10.1323
      +
      +julia> X_recovered = inverse_transform(mach,X_latent)
      +150×4 Matrix{Float64}:
      + 5.04973  3.55838  1.43251  0.242215
      + 4.73689  3.19985  1.44085  0.295257
      + 4.65128  3.25308  1.30187  0.244354
      + ⋮                          
      + 6.50077  2.93602  5.3303   1.87647
      + 6.38639  2.83864  5.54395  2.04117
      + 6.01595  2.67659  5.03669  1.83234
      +
      +julia> BetaML.relative_mean_error(MLJ.matrix(X),X_recovered)
      +0.03387721261716176
      +
      +
      diff --git a/v0.20.3/models/BM25Transformer_MLJText/index.html b/v0.20.3/models/BM25Transformer_MLJText/index.html new file mode 100644 index 000000000..aed43d4f2 --- /dev/null +++ b/v0.20.3/models/BM25Transformer_MLJText/index.html @@ -0,0 +1,46 @@ + +BM25Transformer · MLJ

      BM25Transformer

      BM25Transformer

      A model type for constructing a BM25 transformer, based on MLJText.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BM25Transformer = @load BM25Transformer pkg=MLJText

      Do model = BM25Transformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BM25Transformer(max_doc_freq=...).

      The transformer converts a collection of documents, tokenized or pre-parsed as bags of words/ngrams, to a matrix of Okapi BM25 document-word statistics. The BM25 scoring function uses both term frequency (TF) and inverse document frequency (IDF, defined below), as in TfidfTransformer, but additionally adjusts for the probability that a user will consider a search result relevant, based on the terms in the search query and those in each document.

      In textbooks and implementations there is variation in the definition of IDF. Here two IDF definitions are available. The default, smoothed option provides the IDF for a term t as log((1 + n)/(1 + df(t))) + 1, where n is the total number of documents and df(t) the number of documents in which t appears. Setting smooth_idf = false provides an IDF of log(n/df(t)) + 1.
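
      As a plain-Julia illustration of the two conventions (not part of the MLJText API):

      idf_smoothed(n, df)   = log((1 + n)/(1 + df)) + 1    ## default, smooth_idf = true
      +idf_unsmoothed(n, df) = log(n/df) + 1                 ## smooth_idf = false
      +
      +idf_smoothed(100, 3)      ## ≈ 4.23
      +idf_unsmoothed(100, 3)    ## ≈ 4.51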

      References:

      • http://ethen8181.github.io/machine-learning/search/bm25_intro.html
      • https://en.wikipedia.org/wiki/Okapi_BM25
      • https://nlp.stanford.edu/IR-book/html/htmledition/okapi-bm25-a-non-binary-model-1.html

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any vector whose elements are either tokenized documents or bags of words/ngrams. Specifically, each element is one of the following:

        • A vector of abstract strings (tokens), e.g., ["I", "like", "Sam", ".", "Sam", "is", "nice", "."] (scitype AbstractVector{Textual})
        • A dictionary of counts, indexed on abstract strings, e.g., Dict("I"=>1, "Sam"=>2, "Sam is"=>1) (scitype Multiset{Textual})
        • A dictionary of counts, indexed on plain ngrams, e.g., Dict(("I",)=>1, ("Sam",)=>2, ("I", "Sam")=>1) (scitype Multiset{<:NTuple{N,Textual} where N}); here a plain ngram is a tuple of abstract strings.

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • max_doc_freq=1.0: Restricts the vocabulary that the transformer will consider. Terms that occur in > max_doc_freq documents will not be considered by the transformer. For example, if max_doc_freq is set to 0.9, terms that are in more than 90% of the documents will be removed.
      • min_doc_freq=0.0: Restricts the vocabulary that the transformer will consider. Terms that occur in < min_doc_freq documents will not be considered by the transformer. A value of 0.01 means that only terms that are at least in 1% of the documents will be included.
      • κ=2: The term frequency saturation characteristic. Higher values represent slower saturation. What we mean by saturation is the degree to which a term occurring extra times adds to the overall score.
      • β=0.75: Amplifies the particular document length compared to the average length. The bigger β is, the more document length is amplified in terms of the overall score. The default value is 0.75, and the bounds are restricted between 0 and 1.
      • smooth_idf=true: Control which definition of IDF to use (see above).

      Operations

      • transform(mach, Xnew): Based on the vocabulary, IDF, and mean word counts learned in training, return the matrix of BM25 scores for Xnew, a vector of the same form as X above. The matrix has size (n, p), where n = length(Xnew) and p the size of the vocabulary. Tokens/ngrams not appearing in the learned vocabulary are scored zero.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • vocab: A vector containing the strings used in the transformer's vocabulary.
      • idf_vector: The transformer's calculated IDF vector.
      • mean_words_in_docs: The mean number of words in each document.

      Examples

      BM25Transformer accepts a variety of inputs. The example below transforms tokenized documents:

      using MLJ
      +import TextAnalysis
      +
      +BM25Transformer = @load BM25Transformer pkg=MLJText
      +
      +docs = ["Hi my name is Sam.", "How are you today?"]
      +bm25_transformer = BM25Transformer()
      +
      +julia> tokenized_docs = TextAnalysis.tokenize.(docs)
      +2-element Vector{Vector{String}}:
      + ["Hi", "my", "name", "is", "Sam", "."]
      + ["How", "are", "you", "today", "?"]
      +
      +mach = machine(bm25_transformer, tokenized_docs)
      +fit!(mach)
      +
      +fitted_params(mach)
      +
      +tfidf_mat = transform(mach, tokenized_docs)

      Alternatively, one can provide documents pre-parsed as ngrams counts:

      using MLJ
      +import TextAnalysis
      +
      +docs = ["Hi my name is Sam.", "How are you today?"]
      +corpus = TextAnalysis.Corpus(TextAnalysis.NGramDocument.(docs, 1, 2))
      +ngram_docs = TextAnalysis.ngrams.(corpus)
      +
      +julia> ngram_docs[1]
      +Dict{AbstractString, Int64} with 11 entries:
      +  "is"      => 1
      +  "my"      => 1
      +  "name"    => 1
      +  "."       => 1
      +  "Hi"      => 1
      +  "Sam"     => 1
      +  "my name" => 1
      +  "Hi my"   => 1
      +  "name is" => 1
      +  "Sam ."   => 1
      +  "is Sam"  => 1
      +
      +bm25_transformer = BM25Transformer()
      +mach = machine(bm25_transformer, ngram_docs)
      +MLJ.fit!(mach)
      +fitted_params(mach)
      +
      +tfidf_mat = transform(mach, ngram_docs)

      See also TfidfTransformer, CountTransformer

      diff --git a/v0.20.3/models/BaggingClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/BaggingClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..48c27bc85 --- /dev/null +++ b/v0.20.3/models/BaggingClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +BaggingClassifier · MLJ

      BaggingClassifier

      BaggingClassifier

      A model type for constructing a bagging ensemble classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BaggingClassifier = @load BaggingClassifier pkg=MLJScikitLearnInterface

      Do model = BaggingClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BaggingClassifier(estimator=...).

      A Bagging classifier is an ensemble meta-estimator that fits base classifiers, each on a random subset of the original dataset, and then aggregates their individual predictions (either by voting or by averaging) to form a final prediction. Such a meta-estimator can typically be used as a way to reduce the variance of a black-box estimator (e.g., a decision tree), by introducing randomization into its construction procedure and then making an ensemble out of it.

      diff --git a/v0.20.3/models/BaggingRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/BaggingRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..35eb1d001 --- /dev/null +++ b/v0.20.3/models/BaggingRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +BaggingRegressor · MLJ

      BaggingRegressor

      BaggingRegressor

      A model type for constructing a bagging ensemble regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BaggingRegressor = @load BaggingRegressor pkg=MLJScikitLearnInterface

      Do model = BaggingRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BaggingRegressor(estimator=...).

      A Bagging regressor is an ensemble meta-estimator that fits base regressors, each on a random subset of the original dataset, and then aggregates their individual predictions (either by voting or by averaging) to form a final prediction. Such a meta-estimator can typically be used as a way to reduce the variance of a black-box estimator (e.g., a decision tree), by introducing randomization into its construction procedure and then making an ensemble out of it.

      diff --git a/v0.20.3/models/BayesianLDA_MLJScikitLearnInterface/index.html b/v0.20.3/models/BayesianLDA_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..334ad38d3 --- /dev/null +++ b/v0.20.3/models/BayesianLDA_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +BayesianLDA · MLJ

      BayesianLDA

      BayesianLDA

      A model type for constructing a Bayesian linear discriminant analysis, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BayesianLDA = @load BayesianLDA pkg=MLJScikitLearnInterface

      Do model = BayesianLDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BayesianLDA(solver=...).

      Hyper-parameters

      • solver = svd
      • shrinkage = nothing
      • priors = nothing
      • n_components = nothing
      • store_covariance = false
      • tol = 0.0001
      • covariance_estimator = nothing
      diff --git a/v0.20.3/models/BayesianLDA_MultivariateStats/index.html b/v0.20.3/models/BayesianLDA_MultivariateStats/index.html new file mode 100644 index 000000000..ca2466757 --- /dev/null +++ b/v0.20.3/models/BayesianLDA_MultivariateStats/index.html @@ -0,0 +1,13 @@ + +BayesianLDA · MLJ

      BayesianLDA

      BayesianLDA

      A model type for constructing a Bayesian LDA model, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BayesianLDA = @load BayesianLDA pkg=MultivariateStats

      Do model = BayesianLDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BayesianLDA(method=...).

      The Bayesian multiclass LDA algorithm learns a projection matrix as described in ordinary LDA. Predicted class posterior probability distributions are derived by applying Bayes' rule with a multivariate Gaussian class-conditional distribution. A prior class distribution can be specified by the user or inferred from training data class frequency.

      See also the package documentation. For more information about the algorithm, see Li, Zhu and Ogihara (2006): Using Discriminant Analysis for Multi-class Classification: An Experimental Investigation.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • method::Symbol=:gevd: choice of solver, one of :gevd or :whiten methods.
      • cov_w::StatsBase.SimpleCovariance(): An estimator for the within-class covariance (used in computing the within-class scatter matrix, Sw). Any robust estimator from CovarianceEstimation.jl can be used.
      • cov_b::StatsBase.SimpleCovariance(): The same as cov_w but for the between-class covariance (used in computing the between-class scatter matrix, Sb).
      • outdim::Int=0: The output dimension, i.e., dimension of the transformed space, automatically set to min(indim, nclasses-1) if equal to 0.
      • regcoef::Float64=1e-6: The regularization coefficient. A positive value regcoef*eigmax(Sw) where Sw is the within-class scatter matrix, is added to the diagonal of Sw to improve numerical stability. This can be useful if using the standard covariance estimator.
      • priors::Union{Nothing, UnivariateFinite{<:Any, <:Any, <:Any, <:Real}, Dict{<:Any, <:Real}} = nothing: For use in prediction with Bayes rule. If priors = nothing then priors are estimated from the class proportions in the training data. Otherwise it requires a Dict or UnivariateFinite object specifying the classes with non-zero probabilities in the training target.

      Operations

      • transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.
      • predict(mach, Xnew): Return predictions of the target given features Xnew, which should have the same scitype as X above. Predictions are probabilistic but uncalibrated.
      • predict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • classes: The classes seen during model fitting.
      • projection_matrix: The learned projection matrix, of size (indim, outdim), where indim and outdim are the input and output dimensions respectively (See Report section below).
      • priors: The class priors for classification. As inferred from training target y, if not user-specified. A UnivariateFinite object with levels consistent with levels(y).

      Report

      The fields of report(mach) are:

      • indim: The dimension of the input space, i.e., the number of training features.
      • outdim: The dimension of the transformed space the model is projected to.
      • mean: The mean of the untransformed training data. A vector of length indim.
      • nclasses: The number of classes directly observed in the training data (which can be less than the total number of classes in the class pool).
      • class_means: The class-specific means of the training data. A matrix of size (indim, nclasses) with the ith column being the class-mean of the ith class in classes (See fitted params section above).
      • class_weights: The weights (class counts) of each class. A vector of length nclasses with the ith element being the class weight of the ith class in classes. (See fitted params section above.)
      • Sb: The between class scatter matrix.
      • Sw: The within class scatter matrix.

      Examples

      using MLJ
      +
      +BayesianLDA = @load BayesianLDA pkg=MultivariateStats
      +
      +X, y = @load_iris ## a table and a vector
      +
      +model = BayesianLDA()
      +mach = machine(model, X, y) |> fit!
      +
      +Xproj = transform(mach, X)
      +y_hat = predict(mach, X)
      +labels = predict_mode(mach, X)

      See also LDA, SubspaceLDA, BayesianSubspaceLDA

      diff --git a/v0.20.3/models/BayesianQDA_MLJScikitLearnInterface/index.html b/v0.20.3/models/BayesianQDA_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..eb4bdf37b --- /dev/null +++ b/v0.20.3/models/BayesianQDA_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +BayesianQDA · MLJ

      BayesianQDA

      BayesianQDA

      A model type for constructing a Bayesian quadratic discriminant analysis, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BayesianQDA = @load BayesianQDA pkg=MLJScikitLearnInterface

      Do model = BayesianQDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BayesianQDA(priors=...).

      Hyper-parameters

      • priors = nothing
      • reg_param = 0.0
      • store_covariance = false
      • tol = 0.0001
      diff --git a/v0.20.3/models/BayesianRidgeRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/BayesianRidgeRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..9698ebfdd --- /dev/null +++ b/v0.20.3/models/BayesianRidgeRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +BayesianRidgeRegressor · MLJ

      BayesianRidgeRegressor

      BayesianRidgeRegressor

      A model type for constructing a Bayesian ridge regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BayesianRidgeRegressor = @load BayesianRidgeRegressor pkg=MLJScikitLearnInterface

      Do model = BayesianRidgeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BayesianRidgeRegressor(n_iter=...).

      Hyper-parameters

      • n_iter = 300
      • tol = 0.001
      • alpha_1 = 1.0e-6
      • alpha_2 = 1.0e-6
      • lambda_1 = 1.0e-6
      • lambda_2 = 1.0e-6
      • compute_score = false
      • fit_intercept = true
      • copy_X = true
      • verbose = false
      diff --git a/v0.20.3/models/BayesianSubspaceLDA_MultivariateStats/index.html b/v0.20.3/models/BayesianSubspaceLDA_MultivariateStats/index.html new file mode 100644 index 000000000..b83cffd91 --- /dev/null +++ b/v0.20.3/models/BayesianSubspaceLDA_MultivariateStats/index.html @@ -0,0 +1,13 @@ + +BayesianSubspaceLDA · MLJ

      BayesianSubspaceLDA

      BayesianSubspaceLDA

      A model type for constructing a Bayesian subspace LDA model, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BayesianSubspaceLDA = @load BayesianSubspaceLDA pkg=MultivariateStats

      Do model = BayesianSubspaceLDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BayesianSubspaceLDA(normalize=...).

      The Bayesian multiclass subspace linear discriminant analysis algorithm learns a projection matrix as described in SubspaceLDA. The posterior class probability distribution is derived as in BayesianLDA.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • normalize=true: Option to normalize the between class variance for the number of observations in each class, one of true or false.

      • outdim: the output dimension, automatically set to min(indim, nclasses-1) if equal to 0. If a non-zero outdim is passed, then the actual output dimension used is min(rank, outdim), where rank is the rank of the within-class covariance matrix.

      • priors::Union{Nothing, UnivariateFinite{<:Any, <:Any, <:Any, <:Real}, Dict{<:Any, <:Real}} = nothing: For use in prediction with Bayes rule. If priors = nothing then priors are estimated from the class proportions in the training data. Otherwise it requires a Dict or UnivariateFinite object specifying the classes with non-zero probabilities in the training target.

      Operations

      • transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.
      • predict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above. Predictions are probabilistic but uncalibrated.
      • predict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • classes: The classes seen during model fitting.
      • projection_matrix: The learned projection matrix, of size (indim, outdim), where indim and outdim are the input and output dimensions respectively (See Report section below).
      • priors: The class priors for classification. As inferred from training target y, if not user-specified. A UnivariateFinite object with levels consistent with levels(y).

      Report

      The fields of report(mach) are:

      • indim: The dimension of the input space, i.e., the number of training features.
      • outdim: The dimension of the transformed space the model is projected to.
      • mean: The overall mean of the training data.
      • nclasses: The number of classes directly observed in the training data (which can be less than the total number of classes in the class pool).

      • class_means: The class-specific means of the training data. A matrix of size (indim, nclasses) with the ith column being the class-mean of the ith class in classes (See fitted params section above).

      • class_weights: The weights (class counts) of each class. A vector of length nclasses with the ith element being the class weight of the ith class in classes. (See fitted params section above.)
      • explained_variance_ratio: The ratio of explained variance to total variance. Each dimension corresponds to an eigenvalue.

      Examples

      using MLJ
      +
      +BayesianSubspaceLDA = @load BayesianSubspaceLDA pkg=MultivariateStats
      +
      +X, y = @load_iris ## a table and a vector
      +
      +model = BayesianSubspaceLDA()
      +mach = machine(model, X, y) |> fit!
      +
      +Xproj = transform(mach, X)
      +y_hat = predict(mach, X)
      +labels = predict_mode(mach, X)

      See also LDA, BayesianLDA, SubspaceLDA

      diff --git a/v0.20.3/models/BernoulliNBClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/BernoulliNBClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..8b52dc676 --- /dev/null +++ b/v0.20.3/models/BernoulliNBClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +BernoulliNBClassifier · MLJ

      BernoulliNBClassifier

      BernoulliNBClassifier

      A model type for constructing a Bernoulli naive Bayes classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BernoulliNBClassifier = @load BernoulliNBClassifier pkg=MLJScikitLearnInterface

      Do model = BernoulliNBClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BernoulliNBClassifier(alpha=...).

      Bernoulli naive Bayes classifier. It is suitable for classification with binary features; features will be binarized based on the binarize keyword (unless it is nothing, in which case the features are assumed to be binary).
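
      For instance (illustrative only; binarize is the keyword mentioned above):

      BernoulliNBClassifier = @load BernoulliNBClassifier pkg=MLJScikitLearnInterface
      +model = BernoulliNBClassifier(binarize=0.5)    ## binarize features at the threshold 0.5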

      diff --git a/v0.20.3/models/Birch_MLJScikitLearnInterface/index.html b/v0.20.3/models/Birch_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..6e9a76b7b --- /dev/null +++ b/v0.20.3/models/Birch_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +Birch · MLJ

      Birch

      Birch

      A model type for constructing a birch, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      Birch = @load Birch pkg=MLJScikitLearnInterface

      Do model = Birch() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in Birch(threshold=...).

      Memory-efficient, online-learning algorithm provided as an alternative to MiniBatchKMeans. Note: noisy samples are given the label -1.

      diff --git a/v0.20.3/models/BisectingKMeans_MLJScikitLearnInterface/index.html b/v0.20.3/models/BisectingKMeans_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..1ebcd553c --- /dev/null +++ b/v0.20.3/models/BisectingKMeans_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +BisectingKMeans · MLJ

      BisectingKMeans

      BisectingKMeans

      A model type for constructing a bisecting k means, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BisectingKMeans = @load BisectingKMeans pkg=MLJScikitLearnInterface

      Do model = BisectingKMeans() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BisectingKMeans(n_clusters=...).

      Bisecting K-Means clustering.

      diff --git a/v0.20.3/models/BorderlineSMOTE1_Imbalance/index.html b/v0.20.3/models/BorderlineSMOTE1_Imbalance/index.html new file mode 100644 index 000000000..b022cd46f --- /dev/null +++ b/v0.20.3/models/BorderlineSMOTE1_Imbalance/index.html @@ -0,0 +1,31 @@ + +BorderlineSMOTE1 · MLJ

      BorderlineSMOTE1

      Initiate a BorderlineSMOTE1 model with the given hyper-parameters.

      BorderlineSMOTE1

      A model type for constructing a BorderlineSMOTE1 sampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      BorderlineSMOTE1 = @load BorderlineSMOTE1 pkg=Imbalance

      Do model = BorderlineSMOTE1() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BorderlineSMOTE1(m=...).

      BorderlineSMOTE1 implements the BorderlineSMOTE1 algorithm to correct for class imbalance as in Han, H., Wang, W.-Y., & Mao, B.-H. (2005). Borderline-SMOTE: A new over-sampling method in imbalanced data sets learning. In D.S. Huang, X.-P. Zhang, & G.-B. Huang (Eds.), Advances in Intelligent Computing (pp. 878-887). Springer.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by

      mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed by

      model = BorderlineSMOTE1()

      Hyperparameters

      • m::Integer=5: The number of neighbors to consider while checking the BorderlineSMOTE1 condition. Should be within the range 0 < m < N where N is the number of observations in the data. It will be automatically set to N-1 if N ≤ m.

      • k::Integer=5: Number of nearest neighbors to consider in the SMOTE part of the algorithm. Should be within the range 0 < k < n where n is the number of observations in the smallest class. It will be automatically set to l-1 for any class with l points where l ≤ k.

      • ratios=1.0: A parameter that controls the amount of oversampling to be done for each class

        • Can be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class
        • Can be a dictionary mapping each class label to the float ratio for that class
      • rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, MersenneTwister is used.

      • verbosity::Integer=1: Whenever higher than 0, info regarding the points that will participate in oversampling is logged.

      Transform Inputs

      • X: A matrix or table of floats where each row is an observation from the dataset
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

      • Xover: A matrix or table (depending on whether the input X is a matrix or table, respectively) that includes the original data and the new observations due to oversampling.
      • yover: An abstract vector of labels corresponding to Xover

      Operations

      • transform(mach, X, y): resample the data X and y using BorderlineSMOTE1, returning both the new and original observations

      Example

      using MLJ
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows, num_continuous_feats = 1000, 5
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                stds=[0.1 0.1 0.1], min_sep=0.01, class_probs, rng=42)            
      +
      +julia> Imbalance.checkbalance(y)
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 200 (40.8%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 310 (63.3%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 490 (100.0%) 
      +
      +## load BorderlineSMOTE1
      +BorderlineSMOTE1 = @load BorderlineSMOTE1 pkg=Imbalance
      +
      +## wrap the model in a machine
      +oversampler = BorderlineSMOTE1(m=3, k=5, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)
      +mach = machine(oversampler)
      +
      +## provide the data to transform (there is nothing to fit)
      +Xover, yover = transform(mach, X, y)
      +
      +
      +julia> Imbalance.checkbalance(yover)
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 392 (80.0%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 441 (90.0%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 490 (100.0%) 
      diff --git a/v0.20.3/models/CBLOFDetector_OutlierDetectionPython/index.html b/v0.20.3/models/CBLOFDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..aeafe505e --- /dev/null +++ b/v0.20.3/models/CBLOFDetector_OutlierDetectionPython/index.html @@ -0,0 +1,7 @@ + +CBLOFDetector · MLJ diff --git a/v0.20.3/models/CDDetector_OutlierDetectionPython/index.html b/v0.20.3/models/CDDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..318723ba1 --- /dev/null +++ b/v0.20.3/models/CDDetector_OutlierDetectionPython/index.html @@ -0,0 +1,3 @@ + +CDDetector · MLJ diff --git a/v0.20.3/models/COFDetector_OutlierDetectionNeighbors/index.html b/v0.20.3/models/COFDetector_OutlierDetectionNeighbors/index.html new file mode 100644 index 000000000..b59abff61 --- /dev/null +++ b/v0.20.3/models/COFDetector_OutlierDetectionNeighbors/index.html @@ -0,0 +1,11 @@ + +COFDetector · MLJ

      COFDetector

      COFDetector(k = 5,
      +            metric = Euclidean(),
      +            algorithm = :kdtree,
      +            leafsize = 10,
      +            reorder = true,
      +            parallel = false)

      Local outlier density based on chaining distance between graphs of neighbors, as described in [1].

      Parameters

      k::Integer

      Number of neighbors (must be greater than 0).

      metric::Metric

      This is one of the Metric types defined in the Distances.jl package. It is possible to define your own metrics by creating new types that are subtypes of Metric.

      algorithm::Symbol

      One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.

      static::Union{Bool, Symbol}

      One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.

      leafsize::Int

      Determines at what number of points to stop splitting the tree further. There is a trade-off between traversing the tree and having to evaluate the metric function for increasing number of points.

      reorder::Bool

      While building the tree, this will put points that are close in distance close in memory, since this helps with cache locality. In this case, a copy of the original data will be made so that the original data is left unmodified. This can have a significant impact on performance and is set to true by default.

      parallel::Bool

      Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. Note: fit is not parallel.

      Examples

      using OutlierDetection: COFDetector, fit, transform
      +detector = COFDetector()
      +X = rand(10, 100)
      +model, result = fit(detector, X; verbosity=0)
      +test_scores = transform(detector, model, X)

      References

      [1] Tang, Jian; Chen, Zhixiang; Fu, Ada Wai-Chee; Cheung, David Wai-Lok (2002): Enhancing Effectiveness of Outlier Detections for Low Density Patterns.

      diff --git a/v0.20.3/models/COFDetector_OutlierDetectionPython/index.html b/v0.20.3/models/COFDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..44e9fb9df --- /dev/null +++ b/v0.20.3/models/COFDetector_OutlierDetectionPython/index.html @@ -0,0 +1,3 @@ + +COFDetector · MLJ diff --git a/v0.20.3/models/COPODDetector_OutlierDetectionPython/index.html b/v0.20.3/models/COPODDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..b648088b9 --- /dev/null +++ b/v0.20.3/models/COPODDetector_OutlierDetectionPython/index.html @@ -0,0 +1,2 @@ + +COPODDetector · MLJ diff --git a/v0.20.3/models/CatBoostClassifier_CatBoost/index.html b/v0.20.3/models/CatBoostClassifier_CatBoost/index.html new file mode 100644 index 000000000..2cdfd50c0 --- /dev/null +++ b/v0.20.3/models/CatBoostClassifier_CatBoost/index.html @@ -0,0 +1,16 @@ + +CatBoostClassifier · MLJ

      CatBoostClassifier

      CatBoostClassifier

      A model type for constructing a CatBoost classifier, based on CatBoost.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      CatBoostClassifier = @load CatBoostClassifier pkg=CatBoost

      Do model = CatBoostClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in CatBoostClassifier(iterations=...).

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, Finite, Textual; check column scitypes with schema(X). Textual columns will be passed to catboost as text_features, Multiclass columns will be passed to catboost as cat_features, and OrderedFactor columns will be converted to integers.
      • y: the target, which can be any AbstractVector whose element scitype is Finite; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyper-parameters

For more details on the catboost hyper-parameters, see the Python docs: https://catboost.ai/en/docs/concepts/python-reference_catboostclassifier#parameters
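
As a minimal sketch (not part of the original docstring), hyper-parameters are supplied as keyword arguments at construction; learning_rate and depth below are standard CatBoost parameter names assumed to be exposed by the wrapper:

using MLJ
CatBoostClassifier = @load CatBoostClassifier pkg=CatBoost

## assumed parameter names, mirroring the Python CatBoost API:
model = CatBoostClassifier(iterations=100, learning_rate=0.1, depth=6)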

      Operations

      • predict(mach, Xnew): probabilistic predictions of the target given new features Xnew having the same scitype as X above.
      • predict_mode(mach, Xnew): returns the mode of each of the predictions above.

      Accessor functions

      • feature_importances(mach): return vector of feature importances, in the form of feature::Symbol => importance::Real pairs

      Fitted parameters

      The fields of fitted_params(mach) are:

      • model: The Python CatBoostClassifier model

      Report

      The fields of report(mach) are:

      • feature_importances: Vector{Pair{Symbol, Float64}} of feature importances

      Examples

      using CatBoost.MLJCatBoostInterface
      +using MLJ
      +
      +X = (
      +    duration = [1.5, 4.1, 5.0, 6.7], 
      +    n_phone_calls = [4, 5, 6, 7], 
      +    department = coerce(["acc", "ops", "acc", "ops"], Multiclass), 
      +)
      +y = coerce([0, 0, 1, 1], Multiclass)
      +
      +model = CatBoostClassifier(iterations=5)
      +mach = machine(model, X, y)
      +fit!(mach)
      +probs = predict(mach, X)
      +preds = predict_mode(mach, X)

      See also catboost and the unwrapped model type CatBoost.CatBoostClassifier.

      diff --git a/v0.20.3/models/CatBoostRegressor_CatBoost/index.html b/v0.20.3/models/CatBoostRegressor_CatBoost/index.html new file mode 100644 index 000000000..4c9941e62 --- /dev/null +++ b/v0.20.3/models/CatBoostRegressor_CatBoost/index.html @@ -0,0 +1,15 @@ + +CatBoostRegressor · MLJ

      CatBoostRegressor

      CatBoostRegressor

      A model type for constructing a CatBoost regressor, based on CatBoost.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      CatBoostRegressor = @load CatBoostRegressor pkg=CatBoost

      Do model = CatBoostRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in CatBoostRegressor(iterations=...).

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, Finite, Textual; check column scitypes with schema(X). Textual columns will be passed to catboost as text_features, Multiclass columns will be passed to catboost as cat_features, and OrderedFactor columns will be converted to integers.
      • y: the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyper-parameters

For more details on the catboost hyper-parameters, see the Python docs: https://catboost.ai/en/docs/concepts/python-reference_catboostclassifier#parameters

      Operations

      • predict(mach, Xnew): probabilistic predictions of the target given new features Xnew having the same scitype as X above.

      Accessor functions

      • feature_importances(mach): return vector of feature importances, in the form of feature::Symbol => importance::Real pairs

      Fitted parameters

      The fields of fitted_params(mach) are:

      • model: The Python CatBoostRegressor model

      Report

      The fields of report(mach) are:

      • feature_importances: Vector{Pair{Symbol, Float64}} of feature importances

      Examples

      using CatBoost.MLJCatBoostInterface
      +using MLJ
      +
      +X = (
      +    duration = [1.5, 4.1, 5.0, 6.7], 
      +    n_phone_calls = [4, 5, 6, 7], 
      +    department = coerce(["acc", "ops", "acc", "ops"], Multiclass), 
      +)
      +y = [2.0, 4.0, 6.0, 7.0]
      +
      +model = CatBoostRegressor(iterations=5)
      +mach = machine(model, X, y)
      +fit!(mach)
      +preds = predict(mach, X)

      See also catboost and the unwrapped model type CatBoost.CatBoostRegressor.

      diff --git a/v0.20.3/models/ClusterUndersampler_Imbalance/index.html b/v0.20.3/models/ClusterUndersampler_Imbalance/index.html new file mode 100644 index 000000000..69fc090f0 --- /dev/null +++ b/v0.20.3/models/ClusterUndersampler_Imbalance/index.html @@ -0,0 +1,32 @@ + +ClusterUndersampler · MLJ

      ClusterUndersampler

      Initiate a cluster undersampling model with the given hyper-parameters.

      ClusterUndersampler

      A model type for constructing a cluster undersampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ClusterUndersampler = @load ClusterUndersampler pkg=Imbalance

      Do model = ClusterUndersampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ClusterUndersampler(mode=...).

      ClusterUndersampler implements clustering undersampling as presented in Wei-Chao, L., Chih-Fong, T., Ya-Han, H., & Jing-Shang, J. (2017). Clustering-based undersampling in class-imbalanced data. Information Sciences, 409–410, 17–26. with K-means as the clustering algorithm.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed with model = ClusterUndersampler().

      Hyperparameters

      • mode::AbstractString="nearest": If "center", the undersampled data will consist of the centroids of each cluster found; if "nearest", it will consist of the nearest neighbor of each centroid (see the short sketch after this hyper-parameter list).
      • ratios=1.0: A parameter that controls the amount of undersampling to be done for each class

        • Can be a float and in this case each class will be undersampled to the size of the minority class times the float. By default, all classes are undersampled to the size of the minority class
        • Can be a dictionary mapping each class label to the float ratio for that class
      • maxiter::Integer=100: Maximum number of iterations to run K-means

      • rng::Integer=42: Random number generator seed. Must be an integer.
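
For illustration, here is a minimal sketch (not part of the original docstring) using the centroid mode together with the default float ratios; it reuses the Imbalance.generate_imbalanced_data helper from the example below:

using MLJ
import Imbalance

## generate an imbalanced dataset (a table of floats and a categorical vector):
X, y = Imbalance.generate_imbalanced_data(100, 5; class_probs=[0.5, 0.2, 0.3], rng=42)

ClusterUndersampler = @load ClusterUndersampler pkg=Imbalance

## undersample every class to the size of the minority class, keeping cluster centroids:
undersampler = ClusterUndersampler(mode="center", ratios=1.0, maxiter=100, rng=42)
mach = machine(undersampler)
X_under, y_under = transform(mach, X, y)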

      Transform Inputs

      • X: A matrix or table of floats where each row is an observation from the dataset
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

      • X_under: A matrix or table that includes the data after undersampling depending on whether the input X is a matrix or table respectively
      • y_under: An abstract vector of labels corresponding to X_under

      Operations

      • transform(mach, X, y): resample the data X and y using ClusterUndersampler, returning the undersampled versions

      Example

      using MLJ
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows, num_continuous_feats = 100, 5
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                class_probs, rng=42)   
      +                                                    
      +julia> Imbalance.checkbalance(y; ref="minority")
      + 1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      + 2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (173.7%) 
      + 0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (252.6%) 
      +
      +## load cluster_undersampling
      +ClusterUndersampler = @load ClusterUndersampler pkg=Imbalance
      +
      +## wrap the model in a machine
      +undersampler = ClusterUndersampler(mode="nearest", 
      +                                   ratios=Dict(0=>1.0, 1=> 1.0, 2=>1.0), rng=42)
      +mach = machine(undersampler)
      +
      +## provide the data to transform (there is nothing to fit)
      +X_under, y_under = transform(mach, X, y)
      +
      +                                       
      +julia> Imbalance.checkbalance(y_under; ref="minority")
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%)
      diff --git a/v0.20.3/models/ComplementNBClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/ComplementNBClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..584dd94a5 --- /dev/null +++ b/v0.20.3/models/ComplementNBClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +ComplementNBClassifier · MLJ

      ComplementNBClassifier

      ComplementNBClassifier

      A model type for constructing a Complement naive Bayes classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ComplementNBClassifier = @load ComplementNBClassifier pkg=MLJScikitLearnInterface

      Do model = ComplementNBClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ComplementNBClassifier(alpha=...).

      Similar to MultinomialNBClassifier but with more robust assumptions. Suited for imbalanced datasets.
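
Below is a minimal usage sketch (not part of the original docstring). It assumes the model accepts a table of non-negative count features and that scikit-learn is available to MLJScikitLearnInterface; the feature and class names are hypothetical:

using MLJ

ComplementNBClassifier = @load ComplementNBClassifier pkg=MLJScikitLearnInterface

## hypothetical word counts for four documents:
X = (word_cheap = [3, 0, 1, 4], word_meeting = [0, 2, 5, 1])
y = coerce(["spam", "ham", "ham", "spam"], Multiclass)

model = ComplementNBClassifier(alpha=1.0)
mach = machine(model, X, y) |> fit!
probs = predict(mach, X)       ## probabilistic predictions
preds = predict_mode(mach, X)  ## point predictions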

      diff --git a/v0.20.3/models/ConstantClassifier_MLJModels/index.html b/v0.20.3/models/ConstantClassifier_MLJModels/index.html new file mode 100644 index 000000000..2cea8b158 --- /dev/null +++ b/v0.20.3/models/ConstantClassifier_MLJModels/index.html @@ -0,0 +1,29 @@ + +ConstantClassifier · MLJ

      ConstantClassifier

      ConstantClassifier

      This "dummy" probabilistic predictor always returns the same distribution, irrespective of the provided input pattern. The distribution d returned is the UnivariateFinite distribution based on frequency of classes observed in the training target data. So, pdf(d, level) is the number of times the training target takes on the value level. Use predict_mode instead of predict to obtain the training target mode instead. For more on the UnivariateFinite type, see the CategoricalDistributions.jl package.

      Almost any reasonable model is expected to outperform ConstantClassifier, which is used almost exclusively for testing and establishing performance baselines.

      In MLJ (or MLJModels) do model = ConstantClassifier() to construct an instance.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame)
      • y is the target, which can be any AbstractVector whose element scitype is Finite; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      None.

      Operations

      • predict(mach, Xnew): Return predictions of the target given features Xnew (which for this model are ignored). Predictions are probabilistic.
      • predict_mode(mach, Xnew): Return the mode of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • target_distribution: The distribution fit to the supplied target data.

      Examples

      using MLJ
      +
      +clf = ConstantClassifier()
      +
      +X, y = @load_crabs ## a table and a categorical vector
      +mach = machine(clf, X, y) |> fit!
      +
      +fitted_params(mach)
      +
      +Xnew = (;FL = [8.1, 24.8, 7.2],
      +        RW = [5.1, 25.7, 6.4],
      +        CL = [15.9, 46.7, 14.3],
      +        CW = [18.7, 59.7, 12.2],
      +        BD = [6.2, 23.6, 8.4],)
      +
      +## probabilistic predictions:
      +yhat = predict(mach, Xnew)
      +yhat[1]
      +
      +## raw probabilities:
      +pdf.(yhat, "B")
      +
      +## probability matrix:
      +L = levels(y)
      +pdf(yhat, L)
      +
      +## point predictions:
      +predict_mode(mach, Xnew)

      See also ConstantRegressor

      diff --git a/v0.20.3/models/ConstantRegressor_MLJModels/index.html b/v0.20.3/models/ConstantRegressor_MLJModels/index.html new file mode 100644 index 000000000..c42353e32 --- /dev/null +++ b/v0.20.3/models/ConstantRegressor_MLJModels/index.html @@ -0,0 +1,13 @@ + +ConstantRegressor · MLJ

      ConstantRegressor

      ConstantRegressor

      This "dummy" probabilistic predictor always returns the same distribution, irrespective of the provided input pattern. The distribution returned is the one of the type specified that best fits the training target data. Use predict_mean or predict_median to predict the mean or median values instead. If not specified, a normal distribution is fit.

      Almost any reasonable model is expected to outperform ConstantRegressor which is used almost exclusively for testing and establishing performance baselines.

      In MLJ (or MLJModels) do model = ConstantRegressor() or model = ConstantRegressor(distribution=...) to construct a model instance.

      Training data

      In MLJ (or MLJBase) bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame)
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • distribution_type=Distributions.Normal: The distribution to be fit to the target data. Must be a subtype of Distributions.ContinuousUnivariateDistribution.

      Operations

      • predict(mach, Xnew): Return predictions of the target given features Xnew (which for this model are ignored). Predictions are probabilistic.
      • predict_mean(mach, Xnew): Return instead the means of the probabilistic predictions returned above.
      • predict_median(mach, Xnew): Return instead the medians of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • target_distribution: The distribution fit to the supplied target data.

      Examples

      using MLJ
      +
      +X, y = make_regression(10, 2) ## synthetic data: a table and vector
      +regressor = ConstantRegressor()
      +mach = machine(regressor, X, y) |> fit!
      +
      +fitted_params(mach)
      +
      +Xnew, _ = make_regression(3, 2)
      +predict(mach, Xnew)
      +predict_mean(mach, Xnew)
      +

      See also ConstantClassifier

      diff --git a/v0.20.3/models/ContinuousEncoder_MLJModels/index.html b/v0.20.3/models/ContinuousEncoder_MLJModels/index.html new file mode 100644 index 000000000..84559ed0d --- /dev/null +++ b/v0.20.3/models/ContinuousEncoder_MLJModels/index.html @@ -0,0 +1,38 @@ + +ContinuousEncoder · MLJ

      ContinuousEncoder

      ContinuousEncoder

      A model type for constructing a continuous encoder, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ContinuousEncoder = @load ContinuousEncoder pkg=MLJModels

      Do model = ContinuousEncoder() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ContinuousEncoder(drop_last=...).

      Use this model to arrange all features (columns) of a table to have Continuous element scitype, by applying the following protocol to each feature ftr:

      • If ftr is already Continuous retain it.
      • If ftr is Multiclass, one-hot encode it.
      • If ftr is OrderedFactor, replace it with coerce(ftr, Continuous) (vector of floating point integers), unless one_hot_ordered_factors=true is specified, in which case one-hot encode it.
      • If ftr is Count, replace it with coerce(ftr, Continuous).
      • If ftr has some other element scitype, or was not observed in fitting the encoder, drop it from the table.

      Warning: This transformer assumes that levels(col) for any Multiclass or OrderedFactor column, col, is the same for training data and new data to be transformed.

      To selectively one-hot-encode categorical features (without dropping columns) use OneHotEncoder instead.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any Tables.jl compatible table. Columns can be of mixed type but only those with element scitype Multiclass or OrderedFactor can be encoded. Check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • drop_last=true: whether to drop the column corresponding to the final class of one-hot encoded features. For example, a three-class feature is spawned into three new features if drop_last=false, but just two features otherwise.
      • one_hot_ordered_factors=false: whether to one-hot encode any feature with OrderedFactor element scitype, or to instead coerce it directly to a (single) Continuous feature using its level order

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features_to_keep: names of features that will not be dropped from the table
      • one_hot_encoder: the OneHotEncoder model instance for handling the one-hot encoding
      • one_hot_encoder_fitresult: the fitted parameters of the OneHotEncoder model

      Report

      • features_to_keep: names of input features that will not be dropped from the table
      • new_features: names of all output features

      Example

      X = (name=categorical(["Danesh", "Lee", "Mary", "John"]),
      +     grade=categorical(["A", "B", "A", "C"], ordered=true),
      +     height=[1.85, 1.67, 1.5, 1.67],
      +     n_devices=[3, 2, 4, 3],
      +     comments=["the force", "be", "with you", "too"])
      +
      +julia> schema(X)
      +┌───────────┬──────────────────┐
      +│ names     │ scitypes         │
      +├───────────┼──────────────────┤
      +│ name      │ Multiclass{4}    │
      +│ grade     │ OrderedFactor{3} │
      +│ height    │ Continuous       │
      +│ n_devices │ Count            │
      +│ comments  │ Textual          │
      +└───────────┴──────────────────┘
      +
      +encoder = ContinuousEncoder(drop_last=true)
      +mach = fit!(machine(encoder, X))
      +W = transform(mach, X)
      +
      +julia> schema(W)
      +┌──────────────┬────────────┐
      +│ names        │ scitypes   │
      +├──────────────┼────────────┤
      +│ name__Danesh │ Continuous │
      +│ name__John   │ Continuous │
      +│ name__Lee    │ Continuous │
      +│ grade        │ Continuous │
      +│ height       │ Continuous │
      +│ n_devices    │ Continuous │
      +└──────────────┴────────────┘
      +
      +julia> setdiff(schema(X).names, report(mach).features_to_keep) ## dropped features
      +1-element Vector{Symbol}:
      + :comments
      +

      See also OneHotEncoder

      diff --git a/v0.20.3/models/CountTransformer_MLJText/index.html b/v0.20.3/models/CountTransformer_MLJText/index.html new file mode 100644 index 000000000..e1371a449 --- /dev/null +++ b/v0.20.3/models/CountTransformer_MLJText/index.html @@ -0,0 +1,46 @@ + +CountTransformer · MLJ

      CountTransformer

      CountTransformer

      A model type for constructing a count transformer, based on MLJText.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      CountTransformer = @load CountTransformer pkg=MLJText

      Do model = CountTransformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in CountTransformer(max_doc_freq=...).

      The transformer converts a collection of documents, tokenized or pre-parsed as bags of words/ngrams, to a matrix of term counts.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any vector whose elements are either tokenized documents or bags of words/ngrams. Specifically, each element is one of the following:

        • A vector of abstract strings (tokens), e.g., ["I", "like", "Sam", ".", "Sam", "is", "nice", "."] (scitype AbstractVector{Textual})
        • A dictionary of counts, indexed on abstract strings, e.g., Dict("I"=>1, "Sam"=>2, "Sam is"=>1) (scitype Multiset{Textual})
        • A dictionary of counts, indexed on plain ngrams, e.g., Dict(("I",)=>1, ("Sam",)=>2, ("I", "Sam")=>1) (scitype Multiset{<:NTuple{N,Textual} where N}); here a plain ngram is a tuple of abstract strings.

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • max_doc_freq=1.0: Restricts the vocabulary that the transformer will consider. Terms that occur in > max_doc_freq documents will not be considered by the transformer. For example, if max_doc_freq is set to 0.9, terms that are in more than 90% of the documents will be removed.
      • min_doc_freq=0.0: Restricts the vocabulary that the transformer will consider. Terms that occur in < min_doc_freq documents will not be considered by the transformer. A value of 0.01 means that only terms appearing in at least 1% of the documents will be included (see the sketch following this list).
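
For example, the following sketch (not part of the original docstring) restricts the vocabulary to terms appearing in at least 1% and at most 90% of the documents:

using MLJ

CountTransformer = @load CountTransformer pkg=MLJText

count_transformer = CountTransformer(max_doc_freq=0.9, min_doc_freq=0.01)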

      Operations

      • transform(mach, Xnew): Based on the vocabulary learned in training, return the matrix of counts for Xnew, a vector of the same form as X above. The matrix has size (n, p), where n = length(Xnew) and p the size of the vocabulary. Tokens/ngrams not appearing in the learned vocabulary are scored zero.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • vocab: A vector containing the strings used in the transformer's vocabulary.

      Examples

      CountTransformer accepts a variety of inputs. The example below transforms tokenized documents:

      using MLJ
      +import TextAnalysis
      +
      +CountTransformer = @load CountTransformer pkg=MLJText
      +
      +docs = ["Hi my name is Sam.", "How are you today?"]
      +count_transformer = CountTransformer()
      +
      +julia> tokenized_docs = TextAnalysis.tokenize.(docs)
      +2-element Vector{Vector{String}}:
      + ["Hi", "my", "name", "is", "Sam", "."]
      + ["How", "are", "you", "today", "?"]
      +
      +mach = machine(count_transformer, tokenized_docs)
      +fit!(mach)
      +
      +fitted_params(mach)
      +
      +tfidf_mat = transform(mach, tokenized_docs)

      Alternatively, one can provide documents pre-parsed as ngrams counts:

      using MLJ
      +import TextAnalysis
      +
      +docs = ["Hi my name is Sam.", "How are you today?"]
      +corpus = TextAnalysis.Corpus(TextAnalysis.NGramDocument.(docs, 1, 2))
      +ngram_docs = TextAnalysis.ngrams.(corpus)
      +
      +julia> ngram_docs[1]
      +Dict{AbstractString, Int64} with 11 entries:
      +  "is"      => 1
      +  "my"      => 1
      +  "name"    => 1
      +  "."       => 1
      +  "Hi"      => 1
      +  "Sam"     => 1
      +  "my name" => 1
      +  "Hi my"   => 1
      +  "name is" => 1
      +  "Sam ."   => 1
      +  "is Sam"  => 1
      +
      +count_transformer = CountTransformer()
      +mach = machine(count_transformer, ngram_docs)
      +MLJ.fit!(mach)
      +fitted_params(mach)
      +
      +tfidf_mat = transform(mach, ngram_docs)

      See also TfidfTransformer, BM25Transformer

      diff --git a/v0.20.3/models/DBSCAN_Clustering/index.html b/v0.20.3/models/DBSCAN_Clustering/index.html new file mode 100644 index 000000000..ea7594f18 --- /dev/null +++ b/v0.20.3/models/DBSCAN_Clustering/index.html @@ -0,0 +1,35 @@ + +DBSCAN · MLJ

      DBSCAN

      DBSCAN

      A model type for constructing a DBSCAN clusterer (density-based spatial clustering of applications with noise), based on Clustering.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      DBSCAN = @load DBSCAN pkg=Clustering

      Do model = DBSCAN() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in DBSCAN(radius=...).

      DBSCAN is a clustering algorithm that groups together points that are closely packed together (points with many nearby neighbors), marking as outliers points that lie alone in low-density regions (whose nearest neighbors are too far away). More information is available at the Clustering.jl documentation. Use predict to get cluster assignments. Point types - core, boundary or noise - are accessed from the machine report (see below).

      This is a static implementation, i.e., it does not generalize to new data instances, and there is no training data. For clusterers that do generalize, see KMeans or KMedoids.

      In MLJ or MLJBase, create a machine with

      mach = machine(model)

      Hyper-parameters

      • radius=1.0: query radius.
      • leafsize=20: number of points binned in each leaf node of the nearest neighbor k-d tree.
      • min_neighbors=1: minimum number of neighbors required for a point to qualify as a core point.
      • min_cluster_size=1: minimum number of points in a valid cluster.

      Operations

      • predict(mach, X): return cluster label assignments, as an unordered CategoricalVector. Here X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). Note that points of type noise will always get a label of 0.

      Report

      After calling predict(mach), the fields of report(mach) are:

      • point_types: A CategoricalVector with the DBSCAN point type classification, one element per row of X. Elements are either 'C' (core), 'B' (boundary), or 'N' (noise).

      • nclusters: The number of clusters (excluding the noise "cluster")

      • cluster_labels: The unique list of cluster labels

      • clusters: A vector of Clustering.DbscanCluster objects from Clustering.jl, which have these fields:

        • size: number of points in a cluster (core + boundary)
        • core_indices: indices of points in the cluster core
        • boundary_indices: indices of points on the cluster boundary

      Examples

      using MLJ
      +
      +X, labels  = make_moons(400, noise=0.09, rng=1) ## synthetic data with 2 clusters; X
      +y = map(labels) do label
      +    label == 0 ? "cookie" : "monster"
      +end;
      +y = coerce(y, Multiclass);
      +
      +DBSCAN = @load DBSCAN pkg=Clustering
      +model = DBSCAN(radius=0.13, min_cluster_size=5)
      +mach = machine(model)
      +
      +## compute and output cluster assignments for observations in `X`:
      +yhat = predict(mach, X)
      +
      +## get DBSCAN point types:
      +report(mach).point_types
      +report(mach).nclusters
      +
      +## compare cluster labels with actual labels:
      +compare = zip(yhat, y) |> collect;
      +compare[1:10] ## clusters align with classes
      +
      +## visualize clusters, noise in red:
      +points = zip(X.x1, X.x2) |> collect
      +colors = map(yhat) do i
      +   i == 0 ? :red :
      +   i == 1 ? :blue :
      +   i == 2 ? :green :
      +   i == 3 ? :yellow :
      +   :black
      +end
      +using Plots
      +scatter(points, color=colors)
      diff --git a/v0.20.3/models/DBSCAN_MLJScikitLearnInterface/index.html b/v0.20.3/models/DBSCAN_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..05ab3a80b --- /dev/null +++ b/v0.20.3/models/DBSCAN_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +DBSCAN · MLJ

      DBSCAN

      DBSCAN

A model type for constructing a DBSCAN clusterer, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      DBSCAN = @load DBSCAN pkg=MLJScikitLearnInterface

      Do model = DBSCAN() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in DBSCAN(eps=...).

      Density-Based Spatial Clustering of Applications with Noise. Finds core samples of high density and expands clusters from them. Good for data which contains clusters of similar density.

      diff --git a/v0.20.3/models/DNNDetector_OutlierDetectionNeighbors/index.html b/v0.20.3/models/DNNDetector_OutlierDetectionNeighbors/index.html new file mode 100644 index 000000000..a8cf294bd --- /dev/null +++ b/v0.20.3/models/DNNDetector_OutlierDetectionNeighbors/index.html @@ -0,0 +1,11 @@ + +DNNDetector · MLJ

      DNNDetector

      DNNDetector(d = 0,
      +            metric = Euclidean(),
      +            algorithm = :kdtree,
      +            leafsize = 10,
      +            reorder = true,
      +            parallel = false)

      Anomaly score based on the number of neighbors in a hypersphere of radius d. Knorr et al. [1] directly converted the resulting outlier scores to labels, thus this implementation does not fully reflect the approach from the paper.

      Parameters

      d::Real

      The hypersphere radius used to calculate the global density of an instance.

      metric::Metric

      This is one of the Metric types defined in the Distances.jl package. It is possible to define your own metrics by creating new types that are subtypes of Metric.

      algorithm::Symbol

      One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.

      static::Union{Bool, Symbol}

      One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.

      leafsize::Int

      Determines at what number of points to stop splitting the tree further. There is a trade-off between traversing the tree and having to evaluate the metric function for increasing number of points.

      reorder::Bool

      While building the tree this will put points close in distance close in memory since this helps with cache locality. In this case, a copy of the original data will be made so that the original data is left unmodified. This can have a significant impact on performance and is by default set to true.

      parallel::Bool

      Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. Note: fit is not parallel.

      Examples

      using OutlierDetection: DNNDetector, fit, transform
      +detector = DNNDetector()
      +X = rand(10, 100)
      +model, result = fit(detector, X; verbosity=0)
      +test_scores = transform(detector, model, X)

      References

      [1] Knorr, Edwin M.; Ng, Raymond T. (1998): Algorithms for Mining Distance-Based Outliers in Large Datasets.

      diff --git a/v0.20.3/models/DecisionTreeClassifier_BetaML/index.html b/v0.20.3/models/DecisionTreeClassifier_BetaML/index.html new file mode 100644 index 000000000..cbb74a263 --- /dev/null +++ b/v0.20.3/models/DecisionTreeClassifier_BetaML/index.html @@ -0,0 +1,30 @@ + +DecisionTreeClassifier · MLJ

      DecisionTreeClassifier

      mutable struct DecisionTreeClassifier <: MLJModelInterface.Probabilistic

      A simple Decision Tree model for classification with support for Missing data, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

      • max_depth::Int64: The maximum depth the tree is allowed to reach. When this is reached the node is forced to become a leaf [def: 0, i.e. no limits]
      • min_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]
      • min_records::Int64: The minimum number of records a node must hold to be considered for partitioning [def: 2]
      • max_features::Int64: The maximum number of (random) features to consider at each partitioning [def: 0, i.e. look at all features]
      • splitting_criterion::Function: The function used to compute the information gain of a specific partition, obtained by measuring the difference between the "impurity" of the labels of the parent node and those of the two child nodes, weighted by the respective number of items. [def: gini]. Either gini, entropy or a custom (possibly anonymous) function.
      • rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load DecisionTreeClassifier pkg = "BetaML" verbosity=0
      +BetaML.Trees.DecisionTreeClassifier
      +
      +julia> model       = modelType()
      +DecisionTreeClassifier(
      +  max_depth = 0, 
      +  min_gain = 0.0, 
      +  min_records = 2, 
      +  max_features = 0, 
      +  splitting_criterion = BetaML.Utils.gini, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, y);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(DecisionTreeClassifier(max_depth = 0, …), …).
      +
      +julia> cat_est    = predict(mach, X)
      +150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)
      + ⋮
      + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)
      diff --git a/v0.20.3/models/DecisionTreeClassifier_DecisionTree/index.html b/v0.20.3/models/DecisionTreeClassifier_DecisionTree/index.html new file mode 100644 index 000000000..3e8b7a954 --- /dev/null +++ b/v0.20.3/models/DecisionTreeClassifier_DecisionTree/index.html @@ -0,0 +1,31 @@ + +DecisionTreeClassifier · MLJ

      DecisionTreeClassifier

      DecisionTreeClassifier

      A model type for constructing a CART decision tree classifier, based on DecisionTree.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      DecisionTreeClassifier = @load DecisionTreeClassifier pkg=DecisionTree

      Do model = DecisionTreeClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in DecisionTreeClassifier(max_depth=...).

      DecisionTreeClassifier implements the CART algorithm, originally published in Breiman, Leo; Friedman, J. H.; Olshen, R. A.; Stone, C. J. (1984): "Classification and regression trees". Monterey, CA: Wadsworth & Brooks/Cole Advanced Books & Software..

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      • max_depth=-1: max depth of the decision tree (-1=any)
      • min_samples_leaf=1: min number of samples each leaf needs to have
      • min_samples_split=2: min number of samples needed for a split
      • min_purity_increase=0: min purity needed for a split
      • n_subfeatures=0: number of features to select at random (0 for all)
      • post_prune=false: set to true for post-fit pruning
      • merge_purity_threshold=1.0: (post-pruning) merge leaves having combined purity >= merge_purity_threshold
      • display_depth=5: max depth to show when displaying the tree
      • feature_importance: method to use for computing feature importances. One of (:impurity, :split)
      • rng=Random.GLOBAL_RNG: random number generator or seed

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. Predictions are probabilistic, but uncalibrated.
      • predict_mode(mach, Xnew): instead return the mode of each prediction above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • raw_tree: the raw Node, Leaf or Root object returned by the core DecisionTree.jl algorithm
      • tree: a visualizable, wrapped version of raw_tree implementing the AbstractTrees.jl interface; see "Examples" below
      • encoding: dictionary of target classes keyed on integers used internally by DecisionTree.jl
      • features: the names of the features encountered in training, in an order consistent with the output of print_tree (see below)

      Report

      The fields of report(mach) are:

      • classes_seen: list of target classes actually observed in training
      • print_tree: alternative method to print the fitted tree, with single argument the tree depth; interpretation requires internal integer-class encoding (see "Fitted parameters" above).
      • features: the names of the features encountered in training, in an order consistent with the output of print_tree (see below)

      Accessor functions

      • feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)

      Examples

      using MLJ
      +DecisionTreeClassifier = @load DecisionTreeClassifier pkg=DecisionTree
      +model = DecisionTreeClassifier(max_depth=3, min_samples_split=3)
      +
      +X, y = @load_iris
      +mach = machine(model, X, y) |> fit!
      +
      +Xnew = (sepal_length = [6.4, 7.2, 7.4],
      +        sepal_width = [2.8, 3.0, 2.8],
      +        petal_length = [5.6, 5.8, 6.1],
      +        petal_width = [2.1, 1.6, 1.9],)
      +yhat = predict(mach, Xnew) ## probabilistic predictions
      +predict_mode(mach, Xnew)   ## point predictions
      +pdf.(yhat, "virginica")    ## probabilities for the "verginica" class
      +
      +julia> tree = fitted_params(mach).tree
      +petal_length < 2.45
      +├─ setosa (50/50)
      +└─ petal_width < 1.75
      +   ├─ petal_length < 4.95
      +   │  ├─ versicolor (47/48)
      +   │  └─ virginica (4/6)
      +   └─ petal_length < 4.85
      +      ├─ virginica (2/3)
      +      └─ virginica (43/43)
      +
      +using Plots, TreeRecipe
      +plot(tree) ## for a graphical representation of the tree
      +
      +feature_importances(mach)

      See also DecisionTree.jl and the unwrapped model type MLJDecisionTreeInterface.DecisionTree.DecisionTreeClassifier.

      diff --git a/v0.20.3/models/DecisionTreeRegressor_BetaML/index.html b/v0.20.3/models/DecisionTreeRegressor_BetaML/index.html new file mode 100644 index 000000000..f0a7a303f --- /dev/null +++ b/v0.20.3/models/DecisionTreeRegressor_BetaML/index.html @@ -0,0 +1,33 @@ + +DecisionTreeRegressor · MLJ

      DecisionTreeRegressor

      mutable struct DecisionTreeRegressor <: MLJModelInterface.Deterministic

      A simple Decision Tree model for regression with support for Missing data, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

      • max_depth::Int64: The maximum depth the tree is allowed to reach. When this is reached the node is forced to become a leaf [def: 0, i.e. no limits]
      • min_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]
      • min_records::Int64: The minimum number of records a node must hold to be considered for partitioning [def: 2]
      • max_features::Int64: The maximum number of (random) features to consider at each partitioning [def: 0, i.e. look at all features]
      • splitting_criterion::Function: The function used to compute the information gain of a specific partition, obtained by measuring the difference between the "impurity" of the labels of the parent node and those of the two child nodes, weighted by the respective number of items. [def: variance]. Either variance or a custom (possibly anonymous) function.
      • rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_boston;
      +
      +julia> modelType   = @load DecisionTreeRegressor pkg = "BetaML" verbosity=0
      +BetaML.Trees.DecisionTreeRegressor
      +
      +julia> model       = modelType()
      +DecisionTreeRegressor(
      +  max_depth = 0, 
      +  min_gain = 0.0, 
      +  min_records = 2, 
      +  max_features = 0, 
      +  splitting_criterion = BetaML.Utils.variance, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, y);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(DecisionTreeRegressor(max_depth = 0, …), …).
      +
      +julia> ŷ           = predict(mach, X);
      +
      +julia> hcat(y,ŷ)
      +506×2 Matrix{Float64}:
      + 24.0  26.35
      + 21.6  21.6
      + 34.7  34.8
      +  ⋮    
      + 23.9  23.75
      + 22.0  22.2
      + 11.9  13.2
      diff --git a/v0.20.3/models/DecisionTreeRegressor_DecisionTree/index.html b/v0.20.3/models/DecisionTreeRegressor_DecisionTree/index.html new file mode 100644 index 000000000..7e82d3659 --- /dev/null +++ b/v0.20.3/models/DecisionTreeRegressor_DecisionTree/index.html @@ -0,0 +1,27 @@ + +DecisionTreeRegressor · MLJ

      DecisionTreeRegressor

      DecisionTreeRegressor

      A model type for constructing a CART decision tree regressor, based on DecisionTree.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      DecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree

      Do model = DecisionTreeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in DecisionTreeRegressor(max_depth=...).

      DecisionTreeRegressor implements the CART algorithm, originally published in Breiman, Leo; Friedman, J. H.; Olshen, R. A.; Stone, C. J. (1984): "Classification and regression trees". Monterey, CA: Wadsworth & Brooks/Cole Advanced Books & Software..

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyperparameters

      • max_depth=-1: max depth of the decision tree (-1=any)
      • min_samples_leaf=1: min number of samples each leaf needs to have
      • min_samples_split=2: min number of samples needed for a split
      • min_purity_increase=0: min purity needed for a split
      • n_subfeatures=0: number of features to select at random (0 for all)
      • post_prune=false: set to true for post-fit pruning
      • merge_purity_threshold=1.0: (post-pruning) merge leaves having combined purity >= merge_purity_threshold
      • feature_importance: method to use for computing feature importances. One of (:impurity, :split)
      • rng=Random.GLOBAL_RNG: random number generator or seed

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • tree: the tree or stump object returned by the core DecisionTree.jl algorithm
      • features: the names of the features encountered in training

      Report

      • features: the names of the features encountered in training

      Accessor functions

      • feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)

      Examples

      using MLJ
      +DecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree
      +model = DecisionTreeRegressor(max_depth=3, min_samples_split=3)
      +
      +X, y = make_regression(100, 4; rng=123) ## synthetic data
      +mach = machine(model, X, y) |> fit!
      +
      +Xnew, _ = make_regression(3, 2; rng=123)
      +yhat = predict(mach, Xnew) ## new predictions
      +
      +julia> fitted_params(mach).tree
      +x1 < 0.2758
      +├─ x2 < 0.9137
      +│  ├─ x1 < -0.9582
      +│  │  ├─ 0.9189256882087312 (0/12)
      +│  │  └─ -0.23180616021065256 (0/38)
      +│  └─ -1.6461153800037722 (0/9)
      +└─ x1 < 1.062
      +   ├─ x2 < -0.4969
      +   │  ├─ -0.9330755147107384 (0/5)
      +   │  └─ -2.3287967825015548 (0/17)
      +   └─ x2 < 0.4598
      +      ├─ -2.931299926506291 (0/11)
      +      └─ -4.726518740473489 (0/8)
      +
      +feature_importances(mach) ## get feature importances

      See also DecisionTree.jl and the unwrapped model type MLJDecisionTreeInterface.DecisionTree.DecisionTreeRegressor.

      diff --git a/v0.20.3/models/DeterministicConstantClassifier_MLJModels/index.html b/v0.20.3/models/DeterministicConstantClassifier_MLJModels/index.html new file mode 100644 index 000000000..150afac16 --- /dev/null +++ b/v0.20.3/models/DeterministicConstantClassifier_MLJModels/index.html @@ -0,0 +1,2 @@ + +DeterministicConstantClassifier · MLJ

      DeterministicConstantClassifier

      DeterministicConstantClassifier

      A model type for constructing a deterministic constant classifier, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      DeterministicConstantClassifier = @load DeterministicConstantClassifier pkg=MLJModels

      Do model = DeterministicConstantClassifier() to construct an instance with default hyper-parameters.
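
A minimal usage sketch (not part of the original docstring); this model simply predicts the most frequent class seen in training:

using MLJ

DeterministicConstantClassifier = @load DeterministicConstantClassifier pkg=MLJModels

X = (x1 = rand(6), x2 = rand(6))
y = coerce(["a", "b", "b", "b", "a", "b"], Multiclass)

mach = machine(DeterministicConstantClassifier(), X, y) |> fit!
predict(mach, X)  ## every prediction is "b", the most frequent training class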

      diff --git a/v0.20.3/models/DeterministicConstantRegressor_MLJModels/index.html b/v0.20.3/models/DeterministicConstantRegressor_MLJModels/index.html new file mode 100644 index 000000000..15670984b --- /dev/null +++ b/v0.20.3/models/DeterministicConstantRegressor_MLJModels/index.html @@ -0,0 +1,2 @@ + +DeterministicConstantRegressor · MLJ

      DeterministicConstantRegressor

      DeterministicConstantRegressor

      A model type for constructing a deterministic constant regressor, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      DeterministicConstantRegressor = @load DeterministicConstantRegressor pkg=MLJModels

      Do model = DeterministicConstantRegressor() to construct an instance with default hyper-parameters.
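
A minimal usage sketch (not part of the original docstring); this model simply predicts the mean of the training target:

using MLJ

DeterministicConstantRegressor = @load DeterministicConstantRegressor pkg=MLJModels

X, y = make_regression(10, 2)  ## synthetic data
mach = machine(DeterministicConstantRegressor(), X, y) |> fit!
predict(mach, X)[1:3]  ## each prediction equals the training target mean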

      diff --git a/v0.20.3/models/DummyClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/DummyClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..30cfba7a8 --- /dev/null +++ b/v0.20.3/models/DummyClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +DummyClassifier · MLJ

      DummyClassifier

      DummyClassifier

      A model type for constructing a dummy classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      DummyClassifier = @load DummyClassifier pkg=MLJScikitLearnInterface

      Do model = DummyClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in DummyClassifier(strategy=...).

      DummyClassifier is a classifier that makes predictions using simple rules.
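
A minimal usage sketch (not part of the original docstring); it assumes scikit-learn is available and that strategy accepts the usual scikit-learn names, such as "most_frequent":

using MLJ

DummyClassifier = @load DummyClassifier pkg=MLJScikitLearnInterface

X, y = @load_iris
model = DummyClassifier(strategy="most_frequent")  ## assumed scikit-learn strategy name
mach = machine(model, X, y) |> fit!
predict(mach, X)[1:3]  ## identical predictions, independent of the input rows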

      diff --git a/v0.20.3/models/DummyRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/DummyRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..d10a90c2f --- /dev/null +++ b/v0.20.3/models/DummyRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +DummyRegressor · MLJ

      DummyRegressor

      DummyRegressor

      A model type for constructing a dummy regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      DummyRegressor = @load DummyRegressor pkg=MLJScikitLearnInterface

      Do model = DummyRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in DummyRegressor(strategy=...).

      DummyRegressor is a regressor that makes predictions using simple rules.
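
A minimal usage sketch (not part of the original docstring); it assumes scikit-learn is available. With the default strategy, every prediction is the training target mean:

using MLJ

DummyRegressor = @load DummyRegressor pkg=MLJScikitLearnInterface

X, y = make_regression(20, 2)  ## synthetic data
mach = machine(DummyRegressor(), X, y) |> fit!
predict(mach, X)[1:3]  ## constant predictions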

      diff --git a/v0.20.3/models/ECODDetector_OutlierDetectionPython/index.html b/v0.20.3/models/ECODDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..123ec8a47 --- /dev/null +++ b/v0.20.3/models/ECODDetector_OutlierDetectionPython/index.html @@ -0,0 +1,2 @@ + +ECODDetector · MLJ diff --git a/v0.20.3/models/ENNUndersampler_Imbalance/index.html b/v0.20.3/models/ENNUndersampler_Imbalance/index.html new file mode 100644 index 000000000..115757e3b --- /dev/null +++ b/v0.20.3/models/ENNUndersampler_Imbalance/index.html @@ -0,0 +1,31 @@ + +ENNUndersampler · MLJ

      ENNUndersampler

Initiate an ENN undersampling model with the given hyper-parameters.

      ENNUndersampler

A model type for constructing an ENN undersampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ENNUndersampler = @load ENNUndersampler pkg=Imbalance

      Do model = ENNUndersampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ENNUndersampler(k=...).

      ENNUndersampler undersamples a dataset by removing ("cleaning") points that violate a certain condition such as having a different class compared to the majority of the neighbors as proposed in Dennis L Wilson. Asymptotic properties of nearest neighbor rules using edited data. IEEE Transactions on Systems, Man, and Cybernetics, pages 408–421, 1972.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed by model = ENNUndersampler()

      Hyperparameters

      • k::Integer=5: Number of nearest neighbors to consider in the algorithm. Should be within the range 0 < k < n where n is the number of observations in the smallest class. It will be automatically set to m-1 for any class with m points where m ≤ k.
      • keep_condition::AbstractString="mode": The condition that leads to cleaning a point upon violation. Takes one of "exists", "mode", "only mode" and "all" (see the short sketch after this hyper-parameter list):

        • "exists": the point has at least one neighbor from the same class
        • "mode": the class of the point is one of the most frequent classes of the neighbors (there may be many)
        • "only mode": the class of the point is the single most frequent class of the neighbors
        • "all": the class of the point is the same as all the neighbors
      • min_ratios=1.0: A parameter that controls the maximum amount of undersampling to be done for each class. If this algorithm cleans the data to an extent that this is violated, some of the cleaned points will be revived randomly so that it is satisfied.

        • Can be a float and in this case each class will be at most undersampled to the size of the minority class times the float. By default, all classes are undersampled to the size of the minority class
        • Can be a dictionary mapping each class label to the float minimum ratio for that class
      • force_min_ratios=false: If true, and this algorithm cleans the data such that the ratios for each class exceed those specified in min_ratios, then further undersampling will be performed so that the final ratios are equal to min_ratios.

      • rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.

      • try_preserve_type::Bool=true: When true, the function will try to not change the type of the input table (e.g., DataFrame). However, for some tables, this may not succeed, and in this case, the table returned will be a column table (named-tuple of vectors). This parameter is ignored if the input is a matrix.
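
A short sketch (not part of the original docstring) illustrating the keep_condition hyper-parameter; it reuses the Imbalance.generate_imbalanced_data helper from the example below:

using MLJ
import Imbalance

X, y = Imbalance.generate_imbalanced_data(100, 5; class_probs=[0.5, 0.2, 0.3], rng=42)

ENNUndersampler = @load ENNUndersampler pkg=Imbalance

## keep a point only if its class is the single most frequent class among its 3 neighbors:
undersampler = ENNUndersampler(k=3, keep_condition="only mode")
mach = machine(undersampler)
X_under, y_under = transform(mach, X, y)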

      Transform Inputs

      • X: A matrix or table of floats where each row is an observation from the dataset
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

      • X_under: A matrix or table that includes the data after undersampling depending on whether the input X is a matrix or table respectively
      • y_under: An abstract vector of labels corresponding to X_under

      Operations

      • transform(mach, X, y): resample the data X and y using ENNUndersampler, returning the undersampled versions

      Example

      using MLJ
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows, num_continuous_feats = 100, 5
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                min_sep=0.01, stds=[3.0 3.0 3.0], class_probs, rng=42)     
      +
      +julia> Imbalance.checkbalance(y; ref="minority")
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (173.7%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (252.6%) 
      +
      +## load ENN model type:
      +ENNUndersampler = @load ENNUndersampler pkg=Imbalance
      +
      +## underample the majority classes to  sizes relative to the minority class:
      +undersampler = ENNUndersampler(min_ratios=0.5, rng=42)
      +mach = machine(undersampler)
      +X_under, y_under = transform(mach, X, y)
      +
      +julia> Imbalance.checkbalance(y_under; ref="minority")
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 10 (100.0%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 10 (100.0%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 24 (240.0%) 
      diff --git a/v0.20.3/models/ElasticNetCVRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/ElasticNetCVRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..6a6f82889 --- /dev/null +++ b/v0.20.3/models/ElasticNetCVRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +ElasticNetCVRegressor · MLJ

      ElasticNetCVRegressor

      ElasticNetCVRegressor

A model type for constructing an elastic net regression model with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ElasticNetCVRegressor = @load ElasticNetCVRegressor pkg=MLJScikitLearnInterface

      Do model = ElasticNetCVRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ElasticNetCVRegressor(l1_ratio=...).

      Hyper-parameters

      • l1_ratio = 0.5
      • eps = 0.001
      • n_alphas = 100
      • alphas = nothing
      • fit_intercept = true
      • precompute = auto
      • max_iter = 1000
      • tol = 0.0001
      • cv = 5
      • copy_X = true
      • verbose = 0
      • n_jobs = nothing
      • positive = false
      • random_state = nothing
      • selection = cyclic
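
A minimal usage sketch (not part of the original docstring); it assumes scikit-learn is available via MLJScikitLearnInterface:

using MLJ

ElasticNetCVRegressor = @load ElasticNetCVRegressor pkg=MLJScikitLearnInterface

X, y = make_regression(100, 3)  ## synthetic data
model = ElasticNetCVRegressor(l1_ratio=0.5, cv=5)
mach = machine(model, X, y) |> fit!
predict(mach, X)[1:3]
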
      diff --git a/v0.20.3/models/ElasticNetRegressor_MLJLinearModels/index.html b/v0.20.3/models/ElasticNetRegressor_MLJLinearModels/index.html new file mode 100644 index 000000000..d563c96f7 --- /dev/null +++ b/v0.20.3/models/ElasticNetRegressor_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +ElasticNetRegressor · MLJ

      ElasticNetRegressor

      ElasticNetRegressor

      A model type for constructing an elastic net regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ElasticNetRegressor = @load ElasticNetRegressor pkg=MLJLinearModels

      Do model = ElasticNetRegressor() to construct an instance with default hyper-parameters.

      Elastic net is a linear model with objective function

      $|Xθ - y|₂²/2 + n⋅λ|θ|₂²/2 + n⋅γ|θ|₁$

      where $n$ is the number of observations.

      If scale_penalty_with_samples = false the objective function is instead

      $|Xθ - y|₂²/2 + λ|θ|₂²/2 + γ|θ|₁$.

      Different solver options exist, as indicated under "Hyperparameters" below.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      • lambda::Real: strength of the L2 regularization. Default: 1.0

      • gamma::Real: strength of the L1 regularization. Default: 0.0

      • fit_intercept::Bool: whether to fit the intercept or not. Default: true

      • penalize_intercept::Bool: whether to penalize the intercept. Default: false

      • scale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true

      • solver::Union{Nothing, MLJLinearModels.Solver}: any instance of MLJLinearModels.ProxGrad.

        If solver=nothing (default) then ProxGrad(accel=true) (FISTA) is used.

        Solver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...). Default: nothing

      Example

      using MLJ
      +X, y = make_regression()
      +mach = fit!(machine(ElasticNetRegressor(), X, y))
      +predict(mach, X)
      +fitted_params(mach)
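
      To pass a non-default solver explicitly, a sketch along the following lines should work; the regularization strengths shown are arbitrary, and ISTA() is the solver alias documented above:

      import MLJLinearModels
      model = ElasticNetRegressor(lambda=0.1, gamma=0.01,
                                  solver=MLJLinearModels.ISTA())
      mach = fit!(machine(model, X, y))   ## X, y as generated above
      predict(mach, X)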

      See also LassoRegressor.

      diff --git a/v0.20.3/models/ElasticNetRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/ElasticNetRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..1e0c07e5e --- /dev/null +++ b/v0.20.3/models/ElasticNetRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +ElasticNetRegressor · MLJ

      ElasticNetRegressor

      ElasticNetRegressor

      A model type for constructing an elastic net regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ElasticNetRegressor = @load ElasticNetRegressor pkg=MLJScikitLearnInterface

      Do model = ElasticNetRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ElasticNetRegressor(alpha=...).

      Hyper-parameters

      • alpha = 1.0
      • l1_ratio = 0.5
      • fit_intercept = true
      • precompute = false
      • max_iter = 1000
      • copy_X = true
      • tol = 0.0001
      • warm_start = false
      • positive = false
      • random_state = nothing
      • selection = cyclic
      diff --git a/v0.20.3/models/EpsilonSVR_LIBSVM/index.html b/v0.20.3/models/EpsilonSVR_LIBSVM/index.html new file mode 100644 index 000000000..342638c65 --- /dev/null +++ b/v0.20.3/models/EpsilonSVR_LIBSVM/index.html @@ -0,0 +1,25 @@ + +EpsilonSVR · MLJ

      EpsilonSVR

      EpsilonSVR

      A model type for constructing an ϵ-support vector regressor, based on LIBSVM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      EpsilonSVR = @load EpsilonSVR pkg=LIBSVM

      Do model = EpsilonSVR() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EpsilonSVR(kernel=...).

      Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): "LIBSVM: a library for support vector machines." ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf.

      This model is an adaptation of the classifier SVC to regression, but has an additional parameter epsilon (denoted $ϵ$ in the cited reference).

      Training data

      In MLJ or MLJBase, bind an instance model to data with:

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see "Examples" below).

        • LIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2
        • LIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree
        • LIBSVM.Kernel.RadialBasis: (x1, x2) -> exp(-gamma*norm(x1 - x2)^2)
        • LIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)

        Here gamma, coef0 and degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91.

      • gamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).

      • coef0 = 0.0: kernel parameter (see above)

      • degree::Int32 = Int32(3): degree in polynomial kernel (see above)

      • cost=1.0 (range (0, Inf)): the parameter denoted $C$ in the cited reference; for greater regularization, decrease cost

      • epsilon=0.1 (range (0, Inf)): the parameter denoted $ϵ$ in the cited reference; epsilon is the thickness of the penalty-free neighborhood of the graph of the prediction function ("slab" or "tube"). Specifically, a data point (x, y) incurs no training loss unless it is outside this neighborhood; the further it lies from this neighborhood, the greater the loss penalty (see the illustrative sketch following this list).

      • cachesize=200.0: cache memory size in MB

      • tolerance=0.001: tolerance for the stopping criterion

      • shrinking=true: whether to use shrinking heuristics
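
      As a rough illustration of the role of epsilon (this helper is not part of the LIBSVM.jl API), the per-observation epsilon-insensitive loss can be sketched as:

      ## illustrative only: loss is zero inside the "tube" of half-width epsilon
      eps_insensitive_loss(y, yhat, epsilon) = max(0, abs(y - yhat) - epsilon)

      eps_insensitive_loss(1.0, 1.05, 0.1)   ## 0.0 (inside the tube, no penalty)
      eps_insensitive_loss(1.0, 1.30, 0.1)   ## 0.2 (outside the tube)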

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • libsvm_model: the trained model object created by the LIBSVM.jl package

      Report

      The fields of report(mach) are:

      • gamma: actual value of the kernel parameter gamma used in training

      Examples

      Using a built-in kernel

      using MLJ
      +import LIBSVM
      +
      +EpsilonSVR = @load EpsilonSVR pkg=LIBSVM            ## model type
      +model = EpsilonSVR(kernel=LIBSVM.Kernel.Polynomial) ## instance
      +
      +X, y = make_regression(rng=123) ## table, vector
      +mach = machine(model, X, y) |> fit!
      +
      +Xnew, _ = make_regression(3, rng=123)
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element Vector{Float64}:
      +  0.2512132502584155
      +  0.007340201523624579
      + -0.2482949812264707

      User-defined kernels

      k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`
      +model = EpsilonSVR(kernel=k)
      +mach = machine(model, X, y) |> fit!
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element Vector{Float64}:
      +  1.1121225361666656
      +  0.04667702229741916
      + -0.6958148424680672

      See also NuSVR, LIBSVM.jl and the original C implementation documentation.

      diff --git a/v0.20.3/models/EvoLinearRegressor_EvoLinear/index.html b/v0.20.3/models/EvoLinearRegressor_EvoLinear/index.html new file mode 100644 index 000000000..f7d0e1469 --- /dev/null +++ b/v0.20.3/models/EvoLinearRegressor_EvoLinear/index.html @@ -0,0 +1,3 @@ + +EvoLinearRegressor · MLJ

      EvoLinearRegressor

      EvoLinearRegressor(; kwargs...)

      A model type for constructing an EvoLinearRegressor, based on EvoLinear.jl, and implementing both an internal API and the MLJ model interface.

      Keyword arguments

      • loss=:mse: loss function to be minimised. Can be one of:

        • :mse
        • :logistic
        • :poisson
        • :gamma
        • :tweedie
      • nrounds=10: maximum number of training rounds.

      • eta=1: Learning rate. Typically in the range [1e-2, 1].

      • L1=0: Regularization penalty applied by shrinking a weight update to 0 if its magnitude is below L1; no penalty is applied if the update exceeds L1. Results in sparse feature selection. Typically in the [0, 1] range on normalized features.

      • L2=0: Regularization penalty applied to the square of the weight update value. Restricts large parameter values. Typically in the [0, 1] range on normalized features.

      • rng=123: random seed. Not used at the moment.

      • updater=:all: training method. Only :all is supported at the moment. Gradients for each feature are computed simultaneously, then bias is updated based on all features update.

      • device=:cpu: Only :cpu is supported at the moment.

      Internal API

      Do config = EvoLinearRegressor() to construct a hyper-parameter struct with default hyper-parameters. Provide keyword arguments as listed above to override defaults, for example:

      EvoLinearRegressor(loss=:logistic, L1=1e-3, L2=1e-2, nrounds=100)

      Training model

      A model is built using fit:

      config = EvoLinearRegressor()
      +m = fit(config; x, y, w)

      Inference

      The fitted result is an EvoLinearModel, which acts as a prediction function when passed a features matrix as argument.

      preds = m(x)

      MLJ Interface

      From MLJ, the type can be imported using:

      EvoLinearRegressor = @load EvoLinearRegressor pkg=EvoLinear

      Do model = EvoLinearRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoLinearRegressor(loss=...).

      Training model

      In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where:

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. Predictions are deterministic.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • :fitresult: the EvoLinearModel object returned by the EvoLinear.jl fitting algorithm.

      Report

      The fields of report(mach) are:

      • :coef: Vector of coefficients (βs) associated with each of the features.
      • :bias: Value of the bias.
      • :names: Names of each of the features.
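
      A minimal MLJ workflow sketch for this model (synthetic data via make_regression; the hyper-parameter values are illustrative, and access to the report fields assumes the layout described above):

      using MLJ
      EvoLinearRegressor = @load EvoLinearRegressor pkg=EvoLinear
      X, y = make_regression(200, 4)
      mach = machine(EvoLinearRegressor(nrounds=50, eta=0.5), X, y) |> fit!
      yhat = predict(mach, X)
      report(mach).coef    ## fitted coefficients, one per feature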
      diff --git a/v0.20.3/models/EvoSplineRegressor_EvoLinear/index.html b/v0.20.3/models/EvoSplineRegressor_EvoLinear/index.html new file mode 100644 index 000000000..7b39c8c42 --- /dev/null +++ b/v0.20.3/models/EvoSplineRegressor_EvoLinear/index.html @@ -0,0 +1,3 @@ + +EvoSplineRegressor · MLJ

      EvoSplineRegressor

      EvoSplineRegressor(; kwargs...)

      A model type for constructing an EvoSplineRegressor, based on EvoLinear.jl, and implementing both an internal API and the MLJ model interface.

      Keyword arguments

      • loss=:mse: loss function to be minimised. Can be one of:

        • :mse
        • :logistic
        • :poisson
        • :gamma
        • :tweedie
      • nrounds=10: maximum number of training rounds.

      • eta=1: Learning rate. Typically in the range [1e-2, 1].

      • L1=0: Regularization penalty applied by shrinking a weight update to 0 if its magnitude is below L1; no penalty is applied if the update exceeds L1. Results in sparse feature selection. Typically in the [0, 1] range on normalized features.

      • L2=0: Regularization penalty applied to the square of the weight update value. Restricts large parameter values. Typically in the [0, 1] range on normalized features.

      • rng=123: random seed. Not used at the moment.

      • updater=:all: training method. Only :all is supported at the moment. Gradients for each feature are computed simultaneously, then bias is updated based on all features update.

      • device=:cpu: Only :cpu is supported at the moment.

      Internal API

      Do config = EvoSplineRegressor() to construct a hyper-parameter struct with default hyper-parameters. Provide keyword arguments as listed above to override defaults, for example:

      EvoSplineRegressor(loss=:logistic, L1=1e-3, L2=1e-2, nrounds=100)

      Training model

      A model is built using fit:

      config = EvoSplineRegressor()
      +m = fit(config; x, y, w)

      Inference

      The fitted result is an EvoLinearModel, which acts as a prediction function when passed a features matrix as argument.

      preds = m(x)

      MLJ Interface

      From MLJ, the type can be imported using:

      EvoSplineRegressor = @load EvoSplineRegressor pkg=EvoLinear

      Do model = EvoSplineRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoSplineRegressor(loss=...).

      Training model

      In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where:

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. Predictions are deterministic.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • :fitresult: the SplineModel object returned by the EvoSplineRegressor fitting algorithm.

      Report

      The fields of report(mach) are:

      • :coef: Vector of coefficients (βs) associated with each of the features.
      • :bias: Value of the bias.
      • :names: Names of each of the features.
      diff --git a/v0.20.3/models/EvoTreeClassifier_EvoTrees/index.html b/v0.20.3/models/EvoTreeClassifier_EvoTrees/index.html new file mode 100644 index 000000000..a8ba028ad --- /dev/null +++ b/v0.20.3/models/EvoTreeClassifier_EvoTrees/index.html @@ -0,0 +1,15 @@ + +EvoTreeClassifier · MLJ

      EvoTreeClassifier

      EvoTreeClassifier(;kwargs...)

      A model type for constructing an EvoTreeClassifier, based on EvoTrees.jl, and implementing both an internal API and the MLJ model interface. EvoTreeClassifier is used to perform multi-class classification, using cross-entropy loss.

      Hyper-parameters

      • nrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.

      • eta=0.1: Learning rate. Each tree's raw predictions are scaled by eta prior to being added to the stack of predictions. Must be > 0. A lower eta results in slower learning, requiring a higher nrounds, but typically improves model performance.

      • L2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.

      • lambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. Higher lambda can result in a more robust model.

      • gamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model. Must be >= 0.

      • max_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.

      • min_weight=1.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.

      • rowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • colsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • nbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. Should be between 2 and 255.

      • tree_type="binary" Tree structure to be used. One of:

        • binary: Each node of a tree is grown independently. Trees are built depthwise until the maximum depth is reached or until the minimum weight or gain (see gamma) stops further node splits.
        • oblivious: A common splitting condition is imposed to all nodes of a given depth.
      • rng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).

      Internal API

      Do config = EvoTreeClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeClassifier(max_depth=...).

      Training model

      A model is built using fit_evotree:

      model = fit_evotree(config; x_train, y_train, kwargs...)

      Inference

      Predictions are obtained using predict which returns a Matrix of size [nobs, K] where K is the number of classes:

      EvoTrees.predict(model, X)

      Alternatively, models act as a functor, returning predictions when called as a function with features as argument:

      model(X)

      MLJ

      From MLJ, the type can be imported using:

      EvoTreeClassifier = @load EvoTreeClassifier pkg=EvoTrees

      Do model = EvoTreeClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeClassifier(loss=...).

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:Multiclass or <:OrderedFactor; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. Predictions are probabilistic.
      • predict_mode(mach, Xnew): returns the mode of each of the prediction above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • :fitresult: The GBTree object returned by the EvoTrees.jl fitting algorithm.

      Report

      The fields of report(mach) are:

      • :features: The names of the features encountered in training.

      Examples

      ## Internal API
      +using EvoTrees
      +config = EvoTreeClassifier(max_depth=5, nbins=32, nrounds=100)
      +nobs, nfeats = 1_000, 5
      +x_train, y_train = randn(nobs, nfeats), rand(1:3, nobs)
      +model = fit_evotree(config; x_train, y_train)
      +preds = EvoTrees.predict(model, x_train)
      ## MLJ Interface
      +using MLJ
      +EvoTreeClassifier = @load EvoTreeClassifier pkg=EvoTrees
      +model = EvoTreeClassifier(max_depth=5, nbins=32, nrounds=100)
      +X, y = @load_iris
      +mach = machine(model, X, y) |> fit!
      +preds = predict(mach, X)
      +preds = predict_mode(mach, X)

      See also EvoTrees.jl.

      diff --git a/v0.20.3/models/EvoTreeCount_EvoTrees/index.html b/v0.20.3/models/EvoTreeCount_EvoTrees/index.html new file mode 100644 index 000000000..f17932e53 --- /dev/null +++ b/v0.20.3/models/EvoTreeCount_EvoTrees/index.html @@ -0,0 +1,18 @@ + +EvoTreeCount · MLJ

      EvoTreeCount

      EvoTreeCount(;kwargs...)

      A model type for constructing an EvoTreeCount, based on EvoTrees.jl, and implementing both an internal API and the MLJ model interface. EvoTreeCount is used to perform Poisson probabilistic regression on a count target.

      Hyper-parameters

      • nrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.

      • eta=0.1: Learning rate. Each tree's raw predictions are scaled by eta prior to being added to the stack of predictions. Must be > 0. A lower eta results in slower learning, requiring a higher nrounds, but typically improves model performance.

      • L2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.

      • lambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. Higher lambda can result in a more robust model.

      • gamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model.

      • max_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.

      • min_weight=1.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.

      • rowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • colsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • nbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. Should be between 2 and 255.

      • monotone_constraints=Dict{Int, Int}(): Specify monotonic constraints using a dict where the key is the feature index and the value the applicable constraint (-1=decreasing, 0=none, 1=increasing).

      • tree_type="binary" Tree structure to be used. One of:

        • binary: Each node of a tree is grown independently. Trees are built depthwise until the maximum depth is reached or until the minimum weight or gain (see gamma) stops further node splits.
        • oblivious: A common splitting condition is imposed to all nodes of a given depth.
      • rng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).

      Internal API

      Do config = EvoTreeCount() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeCount(max_depth=...).

      Training model

      A model is built using fit_evotree:

      model = fit_evotree(config; x_train, y_train, kwargs...)

      Inference

      Predictions are obtained using predict which returns a Vector of length nobs:

      EvoTrees.predict(model, X)

      Alternatively, models act as a functor, returning predictions when called as a function with features as argument:

      model(X)

      MLJ

      From MLJ, the type can be imported using:

      EvoTreeCount = @load EvoTreeCount pkg=EvoTrees

      Do model = EvoTreeCount() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeCount(loss=...).

      Training data

      In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:Count; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Operations

      • predict(mach, Xnew): returns a vector of Poisson distributions given features Xnew having the same scitype as X above. Predictions are probabilistic.

      Specific metrics can also be predicted using:

      • predict_mean(mach, Xnew)
      • predict_mode(mach, Xnew)
      • predict_median(mach, Xnew)

      Fitted parameters

      The fields of fitted_params(mach) are:

      • :fitresult: The GBTree object returned by the EvoTrees.jl fitting algorithm.

      Report

      The fields of report(mach) are:

      • :features: The names of the features encountered in training.

      Examples

      ## Internal API
      +using EvoTrees
      +config = EvoTreeCount(max_depth=5, nbins=32, nrounds=100)
      +nobs, nfeats = 1_000, 5
      +x_train, y_train = randn(nobs, nfeats), rand(0:2, nobs)
      +model = fit_evotree(config; x_train, y_train)
      +preds = EvoTrees.predict(model, x_train)
      using MLJ
      +EvoTreeCount = @load EvoTreeCount pkg=EvoTrees
      +model = EvoTreeCount(max_depth=5, nbins=32, nrounds=100)
      +nobs, nfeats = 1_000, 5
      +X, y = randn(nobs, nfeats), rand(0:2, nobs)
      +mach = machine(model, X, y) |> fit!
      +preds = predict(mach, X)
      +preds = predict_mean(mach, X)
      +preds = predict_mode(mach, X)
      +preds = predict_median(mach, X)
      +

      See also EvoTrees.jl.

      diff --git a/v0.20.3/models/EvoTreeGaussian_EvoTrees/index.html b/v0.20.3/models/EvoTreeGaussian_EvoTrees/index.html new file mode 100644 index 000000000..2da794a3f --- /dev/null +++ b/v0.20.3/models/EvoTreeGaussian_EvoTrees/index.html @@ -0,0 +1,17 @@ + +EvoTreeGaussian · MLJ

      EvoTreeGaussian

      EvoTreeGaussian(;kwargs...)

      A model type for constructing an EvoTreeGaussian, based on EvoTrees.jl, and implementing both an internal API and the MLJ model interface. EvoTreeGaussian is used to perform Gaussian probabilistic regression, fitting μ and σ parameters to maximize likelihood.

      Hyper-parameters

      • nrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.

      • eta=0.1: Learning rate. Each tree's raw predictions are scaled by eta prior to being added to the stack of predictions. Must be > 0. A lower eta results in slower learning, requiring a higher nrounds, but typically improves model performance.

      • L2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.

      • lambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. Higher lambda can result in a more robust model.

      • gamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model. Must be >= 0.

      • max_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.

      • min_weight=8.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.

      • rowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • colsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • nbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. Should be between 2 and 255.

      • monotone_constraints=Dict{Int, Int}(): Specify monotonic constraints using a dict where the key is the feature index and the value the applicable constraint (-1=decreasing, 0=none, 1=increasing). Experimental feature: note that for Gaussian regression, constraints may not be enforced systematically.

      • tree_type="binary" Tree structure to be used. One of:

        • binary: Each node of a tree is grown independently. Trees are built depthwise until the maximum depth is reached or until the minimum weight or gain (see gamma) stops further node splits.
        • oblivious: A common splitting condition is imposed to all nodes of a given depth.
      • rng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).

      Internal API

      Do config = EvoTreeGaussian() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeGaussian(max_depth=...).

      Training model

      A model is built using fit_evotree:

      model = fit_evotree(config; x_train, y_train, kwargs...)

      Inference

      Predictions are obtained using predict which returns a Matrix of size [nobs, 2], where the second dimension refers to μ and σ respectively:

      EvoTrees.predict(model, X)

      Alternatively, models act as a functor, returning predictions when called as a function with features as argument:

      model(X)

      MLJ

      From MLJ, the type can be imported using:

      EvoTreeGaussian = @load EvoTreeGaussian pkg=EvoTrees

      Do model = EvoTreeGaussian() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeGaussian(loss=...).

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Operations

      • predict(mach, Xnew): returns a vector of Gaussian distributions given features Xnew having the same scitype as X above. Predictions are probabilistic.

      Specific metrics can also be predicted using:

      • predict_mean(mach, Xnew)
      • predict_mode(mach, Xnew)
      • predict_median(mach, Xnew)
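
      For example, continuing from a machine mach fitted as above, the fitted μ and σ for new features Xnew can be recovered from the probabilistic predictions roughly as follows (assumes Distributions.jl is installed; not taken from the EvoTrees.jl documentation):

      using Distributions               ## provides mean/std for the predicted Normals
      yhat = predict(mach, Xnew)        ## vector of Gaussian (Normal) distributions
      mus = mean.(yhat)
      sigmas = std.(yhat)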

      Fitted parameters

      The fields of fitted_params(mach) are:

      • :fitresult: The GBTree object returned by the EvoTrees.jl fitting algorithm.

      Report

      The fields of report(mach) are:

      • :features: The names of the features encountered in training.

      Examples

      ## Internal API
      +using EvoTrees
      +params = EvoTreeGaussian(max_depth=5, nbins=32, nrounds=100)
      +nobs, nfeats = 1_000, 5
      +x_train, y_train = randn(nobs, nfeats), rand(nobs)
      +model = fit_evotree(params; x_train, y_train)
      +preds = EvoTrees.predict(model, x_train)
      ## MLJ Interface
      +using MLJ
      +EvoTreeGaussian = @load EvoTreeGaussian pkg=EvoTrees
      +model = EvoTreeGaussian(max_depth=5, nbins=32, nrounds=100)
      +X, y = @load_boston
      +mach = machine(model, X, y) |> fit!
      +preds = predict(mach, X)
      +preds = predict_mean(mach, X)
      +preds = predict_mode(mach, X)
      +preds = predict_median(mach, X)
      diff --git a/v0.20.3/models/EvoTreeMLE_EvoTrees/index.html b/v0.20.3/models/EvoTreeMLE_EvoTrees/index.html new file mode 100644 index 000000000..3a5c73dba --- /dev/null +++ b/v0.20.3/models/EvoTreeMLE_EvoTrees/index.html @@ -0,0 +1,17 @@ + +EvoTreeMLE · MLJ

      EvoTreeMLE

      EvoTreeMLE(;kwargs...)

      A model type for constructing an EvoTreeMLE, based on EvoTrees.jl, and implementing both an internal API and the MLJ model interface. EvoTreeMLE performs maximum likelihood estimation. The assumed distribution is specified through the loss kwarg. Both Gaussian and Logistic distributions are supported.

      Hyper-parameters

      • loss=:gaussian: Loss to be minimized during training. One of:

        • :gaussian / :gaussian_mle
        • :logistic / :logistic_mle

      • nrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.

      • eta=0.1: Learning rate. Each tree's raw predictions are scaled by eta prior to being added to the stack of predictions. Must be > 0. A lower eta results in slower learning, requiring a higher nrounds, but typically improves model performance.

      • L2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.

      • lambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. Higher lambda can result in a more robust model.

      • gamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model. Must be >= 0.

      • max_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.

      • min_weight=8.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.

      • rowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • colsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • nbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. Should be between 2 and 255.

      • monotone_constraints=Dict{Int, Int}(): Specify monotonic constraints using a dict where the key is the feature index and the value the applicable constraint (-1=decreasing, 0=none, 1=increasing). Experimental feature: note that for MLE regression, constraints may not be enforced systematically.

      • tree_type="binary" Tree structure to be used. One of:

        • binary: Each node of a tree is grown independently. Trees are built depthwise until the maximum depth is reached or until the minimum weight or gain (see gamma) stops further node splits.
        • oblivious: A common splitting condition is imposed to all nodes of a given depth.
      • rng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).

      Internal API

      Do config = EvoTreeMLE() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeMLE(max_depth=...).

      Training model

      A model is built using fit_evotree:

      model = fit_evotree(config; x_train, y_train, kwargs...)

      Inference

      Predictions are obtained using predict which returns a Matrix of size [nobs, nparams], where the second dimension refers to μ & σ for Normal/Gaussian and μ & s for Logistic.

      EvoTrees.predict(model, X)

      Alternatively, models act as a functor, returning predictions when called as a function with features as argument:

      model(X)

      MLJ

      From MLJ, the type can be imported using:

      EvoTreeMLE = @load EvoTreeMLE pkg=EvoTrees

      Do model = EvoTreeMLE() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeMLE(loss=...).

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Operations

      • predict(mach, Xnew): returns a vector of Gaussian or Logistic distributions (according to the provided loss) given features Xnew having the same scitype as X above. Predictions are probabilistic.

      Specific metrics can also be predicted using:

      • predict_mean(mach, Xnew)
      • predict_mode(mach, Xnew)
      • predict_median(mach, Xnew)

      Fitted parameters

      The fields of fitted_params(mach) are:

      • :fitresult: The GBTree object returned by the EvoTrees.jl fitting algorithm.

      Report

      The fields of report(mach) are:

      • :features: The names of the features encountered in training.

      Examples

      ## Internal API
      +using EvoTrees
      +config = EvoTreeMLE(max_depth=5, nbins=32, nrounds=100)
      +nobs, nfeats = 1_000, 5
      +x_train, y_train = randn(nobs, nfeats), rand(nobs)
      +model = fit_evotree(config; x_train, y_train)
      +preds = EvoTrees.predict(model, x_train)
      ## MLJ Interface
      +using MLJ
      +EvoTreeMLE = @load EvoTreeMLE pkg=EvoTrees
      +model = EvoTreeMLE(max_depth=5, nbins=32, nrounds=100)
      +X, y = @load_boston
      +mach = machine(model, X, y) |> fit!
      +preds = predict(mach, X)
      +preds = predict_mean(mach, X)
      +preds = predict_mode(mach, X)
      +preds = predict_median(mach, X)
      diff --git a/v0.20.3/models/EvoTreeRegressor_EvoTrees/index.html b/v0.20.3/models/EvoTreeRegressor_EvoTrees/index.html new file mode 100644 index 000000000..404c0b8fa --- /dev/null +++ b/v0.20.3/models/EvoTreeRegressor_EvoTrees/index.html @@ -0,0 +1,14 @@ + +EvoTreeRegressor · MLJ

      EvoTreeRegressor

      EvoTreeRegressor(;kwargs...)

      A model type for constructing an EvoTreeRegressor, based on EvoTrees.jl, and implementing both an internal API and the MLJ model interface.

      Hyper-parameters

      • loss=:mse: Loss to be minimized during training. One of:

        • :mse
        • :logloss
        • :gamma
        • :tweedie
        • :quantile
        • :l1
      • nrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.

      • eta=0.1: Learning rate. Each tree's raw predictions are scaled by eta prior to being added to the stack of predictions. Must be > 0. A lower eta results in slower learning, requiring a higher nrounds, but typically improves model performance.

      • L2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.

      • lambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. Higher lambda can result in a more robust model.

      • gamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model. Must be >= 0.

      • alpha::T=0.5: Loss-specific parameter in the [0, 1] range:

        • :quantile: target quantile for the regression.
        • :l1: weighting of positive vs negative residuals (positive residual weights = alpha; negative residual weights = 1 - alpha).

      • max_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.

      • min_weight=1.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.

      • rowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • colsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be in ]0, 1].

      • nbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. Should be between 2 and 255.

      • monotone_constraints=Dict{Int, Int}(): Specify monotonic constraints using a dict where the key is the feature index and the value the applicable constraint (-1=decreasing, 0=none, 1=increasing); see the short sketch after this list. Only :linear, :logistic, :gamma and :tweedie losses are supported at the moment.

      • tree_type="binary" Tree structure to be used. One of:

        • binary: Each node of a tree is grown independently. Trees are built depthwise until the maximum depth is reached or until the minimum weight or gain (see gamma) stops further node splits.
        • oblivious: A common splitting condition is imposed to all nodes of a given depth.
      • rng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).
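
      As a short sketch (the feature index and direction are chosen arbitrarily for illustration), a constraint forcing predictions to be non-decreasing in the second feature could be specified as:

      ## non-decreasing in feature 2; all other features unconstrained
      config = EvoTreeRegressor(monotone_constraints=Dict(2 => 1))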

      Internal API

      Do config = EvoTreeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeRegressor(loss=...).

      Training model

      A model is built using fit_evotree:

      model = fit_evotree(config; x_train, y_train, kwargs...)

      Inference

      Predictions are obtained using predict which returns a Vector of length nobs:

      EvoTrees.predict(model, X)

      Alternatively, models act as a functor, returning predictions when called as a function with features as argument:

      model(X)

      MLJ Interface

      From MLJ, the type can be imported using:

      EvoTreeRegressor = @load EvoTreeRegressor pkg=EvoTrees

      Do model = EvoTreeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeRegressor(loss=...).

      Training model

      In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. Predictions are deterministic.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • :fitresult: The GBTree object returned by the EvoTrees.jl fitting algorithm.

      Report

      The fields of report(mach) are:

      • :features: The names of the features encountered in training.

      Examples

      ## Internal API
      +using EvoTrees
      +config = EvoTreeRegressor(max_depth=5, nbins=32, nrounds=100)
      +nobs, nfeats = 1_000, 5
      +x_train, y_train = randn(nobs, nfeats), rand(nobs)
      +model = fit_evotree(config; x_train, y_train)
      +preds = EvoTrees.predict(model, x_train)
      ## MLJ Interface
      +using MLJ
      +EvoTreeRegressor = @load EvoTreeRegressor pkg=EvoTrees
      +model = EvoTreeRegressor(max_depth=5, nbins=32, nrounds=100)
      +X, y = @load_boston
      +mach = machine(model, X, y) |> fit!
      +preds = predict(mach, X)
      diff --git a/v0.20.3/models/ExtraTreesClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/ExtraTreesClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..b096fd629 --- /dev/null +++ b/v0.20.3/models/ExtraTreesClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +ExtraTreesClassifier · MLJ

      ExtraTreesClassifier

      ExtraTreesClassifier

      A model type for constructing an extra trees classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ExtraTreesClassifier = @load ExtraTreesClassifier pkg=MLJScikitLearnInterface

      Do model = ExtraTreesClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ExtraTreesClassifier(n_estimators=...).

      An extra trees classifier fits a number of randomized decision trees on various sub-samples of the dataset and uses averaging to improve predictive accuracy and control over-fitting.
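
      A minimal usage sketch (assumes the scikit-learn backend is installed; n_estimators is the hyper-parameter mentioned above, and all other hyper-parameters are left at their scikit-learn defaults):

      using MLJ
      ExtraTreesClassifier = @load ExtraTreesClassifier pkg=MLJScikitLearnInterface
      X, y = @load_iris
      mach = machine(ExtraTreesClassifier(n_estimators=100), X, y) |> fit!
      yhat = predict(mach, X)   ## predictions for the training features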

      diff --git a/v0.20.3/models/ExtraTreesRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/ExtraTreesRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..06ef6a87a --- /dev/null +++ b/v0.20.3/models/ExtraTreesRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +ExtraTreesRegressor · MLJ

      ExtraTreesRegressor

      ExtraTreesRegressor

      A model type for constructing an extra trees regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ExtraTreesRegressor = @load ExtraTreesRegressor pkg=MLJScikitLearnInterface

      Do model = ExtraTreesRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ExtraTreesRegressor(n_estimators=...).

      An extra trees regressor fits a number of randomized decision trees on various sub-samples of the dataset and uses averaging to improve predictive accuracy and control over-fitting.

      diff --git a/v0.20.3/models/FactorAnalysis_MultivariateStats/index.html b/v0.20.3/models/FactorAnalysis_MultivariateStats/index.html new file mode 100644 index 000000000..58f422e96 --- /dev/null +++ b/v0.20.3/models/FactorAnalysis_MultivariateStats/index.html @@ -0,0 +1,11 @@ + +FactorAnalysis · MLJ

      FactorAnalysis

      FactorAnalysis

      A model type for constructing a factor analysis model, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      FactorAnalysis = @load FactorAnalysis pkg=MultivariateStats

      Do model = FactorAnalysis() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in FactorAnalysis(method=...).

      Factor analysis is a linear-Gaussian latent variable model that is closely related to probabilistic PCA. In contrast to the probabilistic PCA model, the covariance of the conditional distribution of the observed variable given the latent variable is diagonal rather than isotropic.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • method::Symbol=:cm: Method to use to solve the problem, one of :ml, :em, :bayes.
      • maxoutdim=0: Controls the dimension (number of columns) of the output, outdim. Specifically, outdim = min(n, indim, maxoutdim), where n is the number of observations and indim the input dimension.
      • maxiter::Int=1000: Maximum number of iterations.
      • tol::Real=1e-6: Convergence tolerance.
      • eta::Real=tol: Variance lower bound.
      • mean::Union{Nothing, Real, Vector{Float64}}=nothing: If nothing, centering will be computed and applied; if set to 0 no centering is applied (data is assumed pre-centered); if a vector, the centering is done with that vector.

      Operations

      • transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.
      • inverse_transform(mach, Xsmall): For a dimension-reduced table Xsmall, such as returned by transform, reconstruct a table, having the same number of columns as the original training data X, that transforms to Xsmall. Mathematically, inverse_transform is a right-inverse for the PCA projection map, whose image is orthogonal to the kernel of that map. In particular, if Xsmall = transform(mach, Xnew), then inverse_transform(Xsmall) is only an approximation to Xnew.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • projection: Returns the projection matrix, which has size (indim, outdim), where indim and outdim are the number of features of the input and output respectively. Each column of the projection matrix corresponds to a factor.

      Report

      The fields of report(mach) are:

      • indim: Dimension (number of columns) of the training data and new data to be transformed.
      • outdim: Dimension of transformed data (number of factors).
      • variance: The variance of the factors.
      • covariance_matrix: The estimated covariance matrix.
      • mean: The mean of the untransformed training data, of length indim.
      • loadings: The factor loadings. A matrix of size (indim, outdim) where indim and outdim are as defined above.

      Examples

      using MLJ
      +
      +FactorAnalysis = @load FactorAnalysis pkg=MultivariateStats
      +
      +X, y = @load_iris ## a table and a vector
      +
      +model = FactorAnalysis(maxoutdim=2)
      +mach = machine(model, X) |> fit!
      +
      +Xproj = transform(mach, X)
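
      Continuing this example, the approximate reconstruction described under "Operations" can be obtained with inverse_transform; a brief sketch:

      Xapprox = inverse_transform(mach, Xproj)   ## approximate reconstruction of X
      schema(Xapprox)                            ## same number of columns as X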

      See also KernelPCA, ICA, PPCA, PCA

      diff --git a/v0.20.3/models/FeatureAgglomeration_MLJScikitLearnInterface/index.html b/v0.20.3/models/FeatureAgglomeration_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..b1fdf7a6d --- /dev/null +++ b/v0.20.3/models/FeatureAgglomeration_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +FeatureAgglomeration · MLJ

      FeatureAgglomeration

      FeatureAgglomeration

      A model type for constructing a feature agglomeration, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      FeatureAgglomeration = @load FeatureAgglomeration pkg=MLJScikitLearnInterface

      Do model = FeatureAgglomeration() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in FeatureAgglomeration(n_clusters=...).

      Similar to AgglomerativeClustering, but recursively merges features instead of samples.
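
      A minimal usage sketch (assumes the scikit-learn backend is installed and that the usual unsupervised transform workflow applies; n_clusters is the hyper-parameter mentioned above):

      using MLJ
      FeatureAgglomeration = @load FeatureAgglomeration pkg=MLJScikitLearnInterface
      X, _ = @load_iris
      mach = machine(FeatureAgglomeration(n_clusters=2), X) |> fit!
      Xsmall = transform(mach, X)   ## table with two agglomerated features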

      diff --git a/v0.20.3/models/FeatureSelector_MLJModels/index.html b/v0.20.3/models/FeatureSelector_MLJModels/index.html new file mode 100644 index 000000000..c6cb81f61 --- /dev/null +++ b/v0.20.3/models/FeatureSelector_MLJModels/index.html @@ -0,0 +1,17 @@ + +FeatureSelector · MLJ

      FeatureSelector

      FeatureSelector

      A model type for constructing a feature selector, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      FeatureSelector = @load FeatureSelector pkg=MLJModels

      Do model = FeatureSelector() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in FeatureSelector(features=...).

      Use this model to select features (columns) of a table, usually as part of a model Pipeline.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any table of input features, where "table" is in the sense of Tables.jl

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • features: one of the following, with the behavior indicated:

        • [] (empty, the default): filter out all features (columns) which were not encountered in training
        • non-empty vector of feature names (symbols): keep only the specified features (ignore=false) or keep only unspecified features (ignore=true)
        • function or other callable: keep a feature if the callable returns true on its name. For example, specifying FeatureSelector(features = name -> name in [:x1, :x3], ignore = true) has the same effect as FeatureSelector(features = [:x1, :x3], ignore = true), namely to select all features, with the exception of :x1 and :x3.
      • ignore: whether to ignore or keep specified features, as explained above

      Operations

      • transform(mach, Xnew): select features from the table Xnew as specified by the model, taking features seen during training into account, if relevant

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features_to_keep: the features that will be selected

      Example

      using MLJ
      +
      +X = (ordinal1 = [1, 2, 3],
      +     ordinal2 = coerce(["x", "y", "x"], OrderedFactor),
      +     ordinal3 = [10.0, 20.0, 30.0],
      +     ordinal4 = [-20.0, -30.0, -40.0],
      +     nominal = coerce(["Your father", "he", "is"], Multiclass));
      +
      +selector = FeatureSelector(features=[:ordinal3, ], ignore=true);
      +
      +julia> transform(fit!(machine(selector, X)), X)
      +(ordinal1 = [1, 2, 3],
      + ordinal2 = CategoricalValue{Symbol,UInt32}["x", "y", "x"],
      + ordinal4 = [-20.0, -30.0, -40.0],
      + nominal = CategoricalValue{String,UInt32}["Your father", "he", "is"],)
      +
      diff --git a/v0.20.3/models/FillImputer_MLJModels/index.html b/v0.20.3/models/FillImputer_MLJModels/index.html new file mode 100644 index 000000000..0f6fe4bcf --- /dev/null +++ b/v0.20.3/models/FillImputer_MLJModels/index.html @@ -0,0 +1,34 @@ + +FillImputer · MLJ

      FillImputer

      FillImputer

      A model type for constructing a fill imputer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      FillImputer = @load FillImputer pkg=MLJModels

      Do model = FillImputer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in FillImputer(features=...).

      Use this model to impute missing values in tabular data. A fixed "filler" value is learned from the training data, one for each column of the table.

      For imputing missing values in a vector, use UnivariateFillImputer instead.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have element scitypes Union{Missing, T}, where T is a subtype of Continuous, Multiclass, OrderedFactor or Count. Check scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • features: a vector of names of features (symbols) for which imputation is to be attempted; default is empty, which is interpreted as "impute all".
      • continuous_fill: function or other callable to determine value to be imputed in the case of Continuous (abstract float) data; default is to apply median after skipping missing values
      • count_fill: function or other callable to determine value to be imputed in the case of Count (integer) data; default is to apply rounded median after skipping missing values
      • finite_fill: function or other callable to determine value to be imputed in the case of Multiclass or OrderedFactor data (categorical vectors); default is to apply mode after skipping missing values
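
      For instance, to replace the default median-based Continuous filler with a mean-based one (an illustrative choice, not a recommendation from the MLJModels documentation):

      using Statistics
      imputer = FillImputer(continuous_fill = v -> mean(skipmissing(v)))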

      Operations

      • transform(mach, Xnew): return Xnew with missing values imputed with the fill values learned when fitting mach

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features_seen_in_fit: the names of features (columns) encountered during training
      • univariate_transformer: the univariate model applied to determine the fillers (its fields contain the functions defining the filler computations)
      • filler_given_feature: dictionary of filler values, keyed on feature (column) names

      Examples

      using MLJ
      +imputer = FillImputer()
      +
      +X = (a = [1.0, 2.0, missing, 3.0, missing],
      +     b = coerce(["y", "n", "y", missing, "y"], Multiclass),
      +     c = [1, 1, 2, missing, 3])
      +
      +julia> schema(X)
      +┌───────┬───────────────────────────────┐
      +│ names │ scitypes                      │
      +├───────┼───────────────────────────────┤
      +│ a     │ Union{Missing, Continuous}    │
      +│ b     │ Union{Missing, Multiclass{2}} │
      +│ c     │ Union{Missing, Count}         │
      +└───────┴───────────────────────────────┘
      +
      +mach = machine(imputer, X)
      +fit!(mach)
      +
      +julia> fitted_params(mach).filler_given_feature
      +Dict{Symbol, Any} with 3 entries:
      +  :a => 2.0
      +  :b => "y"
      +  :c => 2
      +
      +julia> transform(mach, X)
      +(a = [1.0, 2.0, 2.0, 3.0, 2.0],
      + b = CategoricalValue{String, UInt32}["y", "n", "y", "y", "y"],
      + c = [1, 1, 2, 2, 3],)

      See also UnivariateFillImputer.

      diff --git a/v0.20.3/models/GMMDetector_OutlierDetectionPython/index.html b/v0.20.3/models/GMMDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..6a4cc7ccf --- /dev/null +++ b/v0.20.3/models/GMMDetector_OutlierDetectionPython/index.html @@ -0,0 +1,13 @@ + +GMMDetector · MLJ diff --git a/v0.20.3/models/GaussianMixtureClusterer_BetaML/index.html b/v0.20.3/models/GaussianMixtureClusterer_BetaML/index.html new file mode 100644 index 000000000..b14db586c --- /dev/null +++ b/v0.20.3/models/GaussianMixtureClusterer_BetaML/index.html @@ -0,0 +1,37 @@ + +GaussianMixtureClusterer · MLJ

      GaussianMixtureClusterer

      mutable struct GaussianMixtureClusterer <: MLJModelInterface.Unsupervised

      An Expectation-Maximisation clustering algorithm with customisable mixtures, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

      • n_classes::Int64: Number of mixtures (latent classes) to consider [def: 3]

      • initial_probmixtures::AbstractVector{Float64}: Initial probabilities of the categorical distribution (n_classes x 1) [default: []]

      • mixtures::Union{Type, Vector{<:BetaML.GMM.AbstractMixture}}: An array (of length n_classes) of the mixtures to employ (see the ?GMM module). Each mixture object can be provided with or without its parameters (e.g. mean and variance for the gaussian ones). Fully qualified mixtures are useful only if the initialisation_strategy parameter is set to "given". This parameter can also be given simply in terms of a type, in which case it is automatically extended to a vector of n_classes mixtures of the specified type. Note that mixing of different mixture types is not currently supported. [def: [DiagonalGaussian() for i in 1:n_classes]]

      • tol::Float64: Tolerance to stop the algorithm [default: 10^(-6)]

      • minimum_variance::Float64: Minimum variance for the mixtures [default: 0.05]

      • minimum_covariance::Float64: Minimum covariance for the mixtures with full covariance matrix [default: 0]. This should be set differently from minimum_variance (see notes).

      • initialisation_strategy::String: The computation method of the vector of the initial mixtures. One of the following:

        • "grid": using a grid approach
        • "given": using the mixture provided in the fully qualified mixtures parameter
        • "kmeans": use first kmeans (itself initialised with a "grid" strategy) to set the initial mixture centers [default]

        Note that currently "random" and "shuffle" initialisations are not supported in gmm-based algorithms.

      • maximum_iterations::Int64: Maximum number of iterations [def: typemax(Int64), i.e. ∞]

      • rng::Random.AbstractRNG: Random Number Generator [default: Random.GLOBAL_RNG]

      Example:

      
      +julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load GaussianMixtureClusterer pkg = "BetaML" verbosity=0
      +BetaML.GMM.GaussianMixtureClusterer
      +
      +julia> model       = modelType()
      +GaussianMixtureClusterer(
      +  n_classes = 3, 
      +  initial_probmixtures = Float64[], 
      +  mixtures = BetaML.GMM.DiagonalGaussian{Float64}[BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing)], 
      +  tol = 1.0e-6, 
      +  minimum_variance = 0.05, 
      +  minimum_covariance = 0.0, 
      +  initialisation_strategy = "kmeans", 
      +  maximum_iterations = 9223372036854775807, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(GaussianMixtureClusterer(n_classes = 3, …), …).
      +Iter. 1:        Var. of the post  10.800150114964184      Log-likelihood -650.0186451891216
      +
      +julia> classes_est = predict(mach, X)
      +150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, Int64, UInt32, Float64}:
      + UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>4.17e-15, 3=>2.1900000000000003e-31)
      + UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>1.25e-13, 3=>5.87e-31)
      + UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>4.5e-15, 3=>1.55e-32)
      + UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>6.93e-14, 3=>3.37e-31)
      + ⋮
      + UnivariateFinite{Multiclass{3}}(1=>5.39e-25, 2=>0.0167, 3=>0.983)
      + UnivariateFinite{Multiclass{3}}(1=>7.5e-29, 2=>0.000106, 3=>1.0)
      + UnivariateFinite{Multiclass{3}}(1=>1.6e-20, 2=>0.594, 3=>0.406)
      diff --git a/v0.20.3/models/GaussianMixtureImputer_BetaML/index.html b/v0.20.3/models/GaussianMixtureImputer_BetaML/index.html new file mode 100644 index 000000000..7a76186bf --- /dev/null +++ b/v0.20.3/models/GaussianMixtureImputer_BetaML/index.html @@ -0,0 +1,36 @@ + +GaussianMixtureImputer · MLJ

      GaussianMixtureImputer

      mutable struct GaussianMixtureImputer <: MLJModelInterface.Unsupervised

      Impute missing values using a probabilistic approach (Gaussian Mixture Models) fitted using the Expectation-Maximisation algorithm, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

      • n_classes::Int64: Number of mixtures (latent classes) to consider [def: 3]

      • initial_probmixtures::Vector{Float64}: Initial probabilities of the categorical distribution (n_classes x 1) [default: []]

      • mixtures::Union{Type, Vector{<:BetaML.GMM.AbstractMixture}}: An array (of length n_classes) of the mixtures to employ (see the ?GMM module in BetaML). Each mixture object can be provided with or without its parameters (e.g. mean and variance for the gaussian ones). Fully qualified mixtures are useful only if the initialisation_strategy parameter is set to "given". This parameter can also be given simply in terms of a type, in which case it is automatically extended to a vector of n_classes mixtures of the specified type. Note that mixing of different mixture types is not currently supported, and that the currently implemented mixtures are SphericalGaussian, DiagonalGaussian and FullGaussian. [def: DiagonalGaussian]

      • tol::Float64: Tolerance to stop the algorithm [default: 10^(-6)]

      • minimum_variance::Float64: Minimum variance for the mixtures [default: 0.05]

      • minimum_covariance::Float64: Minimum covariance for the mixtures with full covariance matrix [default: 0]. This should be set differently from minimum_variance.

      • initialisation_strategy::String: The computation method of the vector of the initial mixtures. One of the following:

        • "grid": using a grid approach
        • "given": using the mixture provided in the fully qualified mixtures parameter
        • "kmeans": use first kmeans (itself initialised with a "grid" strategy) to set the initial mixture centers [default]

        Note that currently "random" and "shuffle" initialisations are not supported in gmm-based algorithms.

      • rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X = [1 10.5;1.5 missing; 1.8 8; 1.7 15; 3.2 40; missing missing; 3.3 38; missing -2.3; 5.2 -2.4] |> table ;
      +
      +julia> modelType   = @load GaussianMixtureImputer  pkg = "BetaML" verbosity=0
      +BetaML.Imputation.GaussianMixtureImputer
      +
      +julia> model     = modelType(initialisation_strategy="grid")
      +GaussianMixtureImputer(
      +  n_classes = 3, 
      +  initial_probmixtures = Float64[], 
      +  mixtures = BetaML.GMM.DiagonalGaussian{Float64}[BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing)], 
      +  tol = 1.0e-6, 
      +  minimum_variance = 0.05, 
      +  minimum_covariance = 0.0, 
      +  initialisation_strategy = "grid", 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach      = machine(model, X);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(GaussianMixtureImputer(n_classes = 3, …), …).
      +Iter. 1:        Var. of the post  2.0225921341714286      Log-likelihood -42.96100103213314
      +
      +julia> X_full       = transform(mach) |> MLJ.matrix
      +9×2 Matrix{Float64}:
      + 1.0      10.5
      + 1.5      14.7366
      + 1.8       8.0
      + 1.7      15.0
      + 3.2      40.0
      + 2.51842  15.1747
      + 3.3      38.0
      + 2.47412  -2.3
      + 5.2      -2.4
      diff --git a/v0.20.3/models/GaussianMixtureRegressor_BetaML/index.html b/v0.20.3/models/GaussianMixtureRegressor_BetaML/index.html new file mode 100644 index 000000000..c2038fc6b --- /dev/null +++ b/v0.20.3/models/GaussianMixtureRegressor_BetaML/index.html @@ -0,0 +1,33 @@ + +GaussianMixtureRegressor · MLJ

      GaussianMixtureRegressor

      mutable struct GaussianMixtureRegressor <: MLJModelInterface.Deterministic

      A non-linear regressor derived from fitting the data on a probabilistic model (Gaussian Mixture Model). Relatively fast but generally not very precise, except for data with a structure matching the chosen underlying mixture.

      This is the single-target version of the model. If you want to predict several labels (y) at once, use the MLJ model MultitargetGaussianMixtureRegressor.

      Hyperparameters:

      • n_classes::Int64: Number of mixtures (latent classes) to consider [def: 3]

      • initial_probmixtures::Vector{Float64}: Initial probabilities of the categorical distribution (n_classes x 1) [default: []]

      • mixtures::Union{Type, Vector{<:BetaML.GMM.AbstractMixture}}: An array (of length n_classes) of the mixtures to employ (see the ?GMM module). Each mixture object can be provided with or without its parameters (e.g. mean and variance for the gaussian ones). Fully qualified mixtures are useful only if the initialisation_strategy parameter is set to "given". This parameter can also be given simply in terms of a type, in which case it is automatically extended to a vector of n_classes mixtures of the specified type. Note that mixing of different mixture types is not currently supported. [def: [DiagonalGaussian() for i in 1:n_classes]]

      • tol::Float64: Tolerance to stop the algorithm [default: 10^(-6)]

      • minimum_variance::Float64: Minimum variance for the mixtures [default: 0.05]

      • minimum_covariance::Float64: Minimum covariance for the mixtures with full covariance matrix [default: 0]. This should be set differently from minimum_variance (see notes).

      • initialisation_strategy::String: The computation method of the vector of the initial mixtures. One of the following:

        • "grid": using a grid approach
        • "given": using the mixture provided in the fully qualified mixtures parameter
        • "kmeans": use first kmeans (itself initialised with a "grid" strategy) to set the initial mixture centers [default]

        Note that currently "random" and "shuffle" initialisations are not supported in gmm-based algorithms.

      • maximum_iterations::Int64: Maximum number of iterations [def: typemax(Int64), i.e. ∞]

      • rng::Random.AbstractRNG: Random Number Generator [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X, y      = @load_boston;
      +
      +julia> modelType = @load GaussianMixtureRegressor pkg = "BetaML" verbosity=0
      +BetaML.GMM.GaussianMixtureRegressor
      +
      +julia> model     = modelType()
      +GaussianMixtureRegressor(
      +  n_classes = 3, 
      +  initial_probmixtures = Float64[], 
      +  mixtures = BetaML.GMM.DiagonalGaussian{Float64}[BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing)], 
      +  tol = 1.0e-6, 
      +  minimum_variance = 0.05, 
      +  minimum_covariance = 0.0, 
      +  initialisation_strategy = "kmeans", 
      +  maximum_iterations = 9223372036854775807, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach      = machine(model, X, y);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(GaussianMixtureRegressor(n_classes = 3, …), …).
      +Iter. 1:        Var. of the post  21.74887448784976       Log-likelihood -21687.09917379566
      +
      +julia> ŷ         = predict(mach, X)
      +506-element Vector{Float64}:
      + 24.703442835305577
      + 24.70344283512716
      +  ⋮
      + 17.172486989759676
      + 17.172486989759644
      diff --git a/v0.20.3/models/GaussianNBClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/GaussianNBClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..79d72d710 --- /dev/null +++ b/v0.20.3/models/GaussianNBClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +GaussianNBClassifier · MLJ

      GaussianNBClassifier

      GaussianNBClassifier

      A model type for constructing a Gaussian naive Bayes classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      GaussianNBClassifier = @load GaussianNBClassifier pkg=MLJScikitLearnInterface

      Do model = GaussianNBClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in GaussianNBClassifier(priors=...).

      Hyper-parameters

      • priors = nothing
      • var_smoothing = 1.0e-9
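
      The docstring provides no example, so here is a minimal usage sketch following the generic MLJ supervised workflow. The @load line is as documented above; the use of @load_iris, and the assumption that predictions from this interface are probabilistic, are illustrative rather than taken from the docstring.

      using MLJ
      +GaussianNBClassifier = @load GaussianNBClassifier pkg=MLJScikitLearnInterface
      +
      +X, y = @load_iris                    ## Continuous features, Multiclass target
      +mach = machine(GaussianNBClassifier(), X, y) |> fit!
      +
      +yhat = predict(mach, X)              ## probabilistic predictions (assumed)
      +predict_mode(mach, X)                ## point predictions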
      diff --git a/v0.20.3/models/GaussianNBClassifier_NaiveBayes/index.html b/v0.20.3/models/GaussianNBClassifier_NaiveBayes/index.html new file mode 100644 index 000000000..2b610ac1d --- /dev/null +++ b/v0.20.3/models/GaussianNBClassifier_NaiveBayes/index.html @@ -0,0 +1,13 @@ + +GaussianNBClassifier · MLJ

      GaussianNBClassifier

      GaussianNBClassifier

      A model type for constructing a Gaussian naive Bayes classifier, based on NaiveBayes.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      GaussianNBClassifier = @load GaussianNBClassifier pkg=NaiveBayes

      Do model = GaussianNBClassifier() to construct an instance with default hyper-parameters.

      Given each class taken on by the target variable y, it is supposed that the conditional probability distribution for the input variables X is a multivariate Gaussian. The mean and covariance of these Gaussian distributions are estimated using maximum likelihood, and a probability distribution for y given X is deduced by applying Bayes' rule. The required marginal for y is estimated using class frequency in the training data.

      Important. The name "naive Bayes classifier" is perhaps misleading. Since we are learning the full multivariate Gaussian distributions for X given y, we are not applying the usual naive Bayes independence condition, which would amount to forcing the covariance matrix to be diagonal.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is Finite; check the scitype with schema(y)

      Train the machine using fit!(mach, rows=...).

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above. Predictions are probabilistic.
      • predict_mode(mach, Xnew): Return the mode of above predictions.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • c_counts: A dictionary containing the observed count of each input class.

      • c_stats: A dictionary containing observed statistics on each input class. Each class is represented by a DataStats object, with the following fields:

        • n_vars: The number of variables used to describe the class's behavior.
        • n_obs: The number of times the class is observed.
        • obs_axis: The axis along which the observations were computed.
      • gaussians: A per class dictionary of Gaussians, each representing the distribution of the class. Represented with type Distributions.MvNormal from the Distributions.jl package.

      • n_obs: The total number of observations in the training data.

      Examples

      using MLJ
      +GaussianNB = @load GaussianNBClassifier pkg=NaiveBayes
      +
      +X, y = @load_iris
      +clf = GaussianNB()
      +mach = machine(clf, X, y) |> fit!
      +
      +fitted_params(mach)
      +
      +preds = predict(mach, X) ## probabilistic predictions
      +preds[1]
      +predict_mode(mach, X) ## point predictions

      See also MultinomialNBClassifier

      diff --git a/v0.20.3/models/GaussianProcessClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/GaussianProcessClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..1c72f91ba --- /dev/null +++ b/v0.20.3/models/GaussianProcessClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +GaussianProcessClassifier · MLJ

      GaussianProcessClassifier

      GaussianProcessClassifier

      A model type for constructing a Gaussian process classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      GaussianProcessClassifier = @load GaussianProcessClassifier pkg=MLJScikitLearnInterface

      Do model = GaussianProcessClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in GaussianProcessClassifier(kernel=...).

      Hyper-parameters

      • kernel = nothing
      • optimizer = fmin_l_bfgs_b
      • n_restarts_optimizer = 0
      • copy_X_train = true
      • random_state = nothing
      • max_iter_predict = 100
      • warm_start = false
      • multi_class = one_vs_rest
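
      No example is given in the docstring. A minimal sketch, assuming the generic MLJ supervised workflow applies and using the iris data purely for illustration:

      using MLJ
      +GaussianProcessClassifier = @load GaussianProcessClassifier pkg=MLJScikitLearnInterface
      +
      +X, y = @load_iris
      +model = GaussianProcessClassifier()  ## kernel = nothing falls back to the library default
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)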
      diff --git a/v0.20.3/models/GaussianProcessRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/GaussianProcessRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..5dc302984 --- /dev/null +++ b/v0.20.3/models/GaussianProcessRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +GaussianProcessRegressor · MLJ

      GaussianProcessRegressor

      GaussianProcessRegressor

      A model type for constructing a Gaussian process regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      GaussianProcessRegressor = @load GaussianProcessRegressor pkg=MLJScikitLearnInterface

      Do model = GaussianProcessRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in GaussianProcessRegressor(kernel=...).

      Hyper-parameters

      • kernel = nothing
      • alpha = 1.0e-10
      • optimizer = fmin_l_bfgs_b
      • n_restarts_optimizer = 0
      • normalize_y = false
      • copy_X_train = true
      • random_state = nothing
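
      Again no example is provided in the docstring. A minimal sketch on synthetic data (the choice of make_regression is an illustrative assumption):

      using MLJ
      +GaussianProcessRegressor = @load GaussianProcessRegressor pkg=MLJScikitLearnInterface
      +
      +X, y = make_regression(100, 4)       ## synthetic table of Continuous features
      +mach = machine(GaussianProcessRegressor(), X, y) |> fit!
      +predict(mach, X)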
      diff --git a/v0.20.3/models/GeneralImputer_BetaML/index.html b/v0.20.3/models/GeneralImputer_BetaML/index.html new file mode 100644 index 000000000..ae2fff776 --- /dev/null +++ b/v0.20.3/models/GeneralImputer_BetaML/index.html @@ -0,0 +1,60 @@ + +GeneralImputer · MLJ

      GeneralImputer

      mutable struct GeneralImputer <: MLJModelInterface.Unsupervised

      Impute missing values using arbitrary learning models, from the Beta Machine Learning Toolkit (BetaML).

      Impute missing values using a vector (one per column) of arbitrary learning models (classifiers/regressors, not necessarily from BetaML) that implement the interface m = Model([options]), train!(m,X,Y) and predict(m,X).

      Hyperparameters:

      • cols_to_impute::Union{String, Vector{Int64}}: Columns in the matrix for which to create an imputation model, i.e. to impute. It can be a vector of column IDs (positions), or the keywords "auto" (default) or "all". With "auto" the model automatically detects the columns with missing data and imputes only them. You may manually specify the columns, or use "all" if you want imputation models to be created for those columns during training even when all the training data are non-missing, so that the trained model can later be applied to further data with possibly missing values.
      • estimator::Any: An estimator model (regressor or classifier), optionally with its options (hyper-parameters), to be used to impute the various columns of the matrix. It can also be a cols_to_impute-length vector of different estimators, to use a different estimator for each column (dimension) to impute, for example when some columns are categorical (and will hence require a classifier) and some others are numerical (hence requiring a regressor). [default: nothing, i.e. use BetaML random forests, handling classification and regression jobs automatically].
      • missing_supported::Union{Bool, Vector{Bool}}: Whether the estimator(s) used to predict the missing data themselves support missing data in the training features (X). If not, when the model for a certain dimension is fitted, dimensions with missing data in the same rows as those where imputation is needed are dropped, and then only non-missing rows in the other remaining dimensions are considered. It can be a vector of boolean values to specify this property for each individual estimator, or a single boolean value to apply to all the estimators [default: false]
      • fit_function::Union{Function, Vector{Function}}: The function used by the estimator(s) to fit the model. It should take as first argument the model itself, as second argument a matrix representing the features, and as third argument a vector representing the labels. This parameter is mandatory for non-BetaML estimators and can be a single value or a vector (one per estimator) in case different estimator packages are used. [default: BetaML.fit!]
      • predict_function::Union{Function, Vector{Function}}: The function used by the estimator(s) to predict the labels. It should take as first argument the model itself and as second argument a matrix representing the features. This parameter is mandatory for non-BetaML estimators and can be a single value or a vector (one per estimator) in case different estimator packages are used. [default: BetaML.predict]
      • recursive_passages::Int64: Define the number of times to go through the various columns to impute their data. Useful when there are data to impute on multiple columns. The order of the first passage is given by the decreasing number of missing values per column; the other passages are random [default: 1].
      • rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]. Note that this influences only the specific GeneralImputer code; the individual estimators may have their own rng (or similar) parameter.

      Examples:

      • Using BetaML models:
      julia> using MLJ;
      +julia> import BetaML ## The library from which to get the individual estimators to be used for each column imputation
      +julia> X = ["a"         8.2;
      +            "a"     missing;
      +            "a"         7.8;
      +            "b"          21;
      +            "b"          18;
      +            "c"        -0.9;
      +            missing      20;
      +            "c"        -1.8;
      +            missing    -2.3;
      +            "c"        -2.4] |> table ;
      +julia> modelType = @load GeneralImputer  pkg = "BetaML" verbosity=0
      +BetaML.Imputation.GeneralImputer
      +julia> model     = modelType(estimator=BetaML.DecisionTreeEstimator(),recursive_passages=2);
      +julia> mach      = machine(model, X);
      +julia> fit!(mach);
      +[ Info: Training machine(GeneralImputer(cols_to_impute = auto, …), …).
      +julia> X_full       = transform(mach) |> MLJ.matrix
      +10×2 Matrix{Any}:
      + "a"   8.2
      + "a"   8.0
      + "a"   7.8
      + "b"  21
      + "b"  18
      + "c"  -0.9
      + "b"  20
      + "c"  -1.8
      + "c"  -2.3
      + "c"  -2.4
      • Using third party packages (in this example DecisionTree):
      julia> using MLJ;
      +julia> import DecisionTree ## An example of external estimators to be used for each column imputation
      +julia> X = ["a"         8.2;
      +            "a"     missing;
      +            "a"         7.8;
      +            "b"          21;
      +            "b"          18;
      +            "c"        -0.9;
      +            missing      20;
      +            "c"        -1.8;
      +            missing    -2.3;
      +            "c"        -2.4] |> table ;
      +julia> modelType   = @load GeneralImputer  pkg = "BetaML" verbosity=0
      +BetaML.Imputation.GeneralImputer
      +julia> model     = modelType(estimator=[DecisionTree.DecisionTreeClassifier(),DecisionTree.DecisionTreeRegressor()], fit_function=DecisionTree.fit!,predict_function=DecisionTree.predict,recursive_passages=2);
      +julia> mach      = machine(model, X);
      +julia> fit!(mach);
      +[ Info: Training machine(GeneralImputer(cols_to_impute = auto, …), …).
      +julia> X_full       = transform(mach) |> MLJ.matrix
      +10×2 Matrix{Any}:
      + "a"   8.2
      + "a"   7.51111
      + "a"   7.8
      + "b"  21
      + "b"  18
      + "c"  -0.9
      + "b"  20
      + "c"  -1.8
      + "c"  -2.3
      + "c"  -2.4
      diff --git a/v0.20.3/models/GradientBoostingClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/GradientBoostingClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..2766007f4 --- /dev/null +++ b/v0.20.3/models/GradientBoostingClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +GradientBoostingClassifier · MLJ

      GradientBoostingClassifier

      GradientBoostingClassifier

      A model type for constructing a gradient boosting classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      GradientBoostingClassifier = @load GradientBoostingClassifier pkg=MLJScikitLearnInterface

      Do model = GradientBoostingClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in GradientBoostingClassifier(loss=...).

      This algorithm builds an additive model in a forward stage-wise fashion; it allows for the optimization of arbitrary differentiable loss functions. In each stage n_classes_ regression trees are fit on the negative gradient of the loss function, e.g. binary or multiclass log loss. Binary classification is a special case where only a single regression tree is induced.

      HistGradientBoostingClassifier is a much faster variant of this algorithm for intermediate datasets (n_samples >= 10_000).
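
      For completeness, a minimal usage sketch with default hyper-parameters and the iris data; this is not part of the original docstring:

      using MLJ
      +GradientBoostingClassifier = @load GradientBoostingClassifier pkg=MLJScikitLearnInterface
      +
      +X, y = @load_iris
      +mach = machine(GradientBoostingClassifier(), X, y) |> fit!
      +yhat = predict(mach, X)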

      diff --git a/v0.20.3/models/GradientBoostingRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/GradientBoostingRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..e8618ec9a --- /dev/null +++ b/v0.20.3/models/GradientBoostingRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +GradientBoostingRegressor · MLJ

      GradientBoostingRegressor

      GradientBoostingRegressor

      A model type for constructing a gradient boosting ensemble regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      GradientBoostingRegressor = @load GradientBoostingRegressor pkg=MLJScikitLearnInterface

      Do model = GradientBoostingRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in GradientBoostingRegressor(loss=...).

      This estimator builds an additive model in a forward stage-wise fashion; it allows for the optimization of arbitrary differentiable loss functions. In each stage a regression tree is fit on the negative gradient of the given loss function.

      HistGradientBoostingRegressor is a much faster variant of this algorithm for intermediate datasets (n_samples >= 10_000).
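
      A minimal usage sketch on synthetic data, following the generic MLJ workflow (illustrative only; not from the docstring):

      using MLJ
      +GradientBoostingRegressor = @load GradientBoostingRegressor pkg=MLJScikitLearnInterface
      +
      +X, y = make_regression(200, 3)
      +mach = machine(GradientBoostingRegressor(), X, y) |> fit!
      +predict(mach, X)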

      diff --git a/v0.20.3/models/HBOSDetector_OutlierDetectionPython/index.html b/v0.20.3/models/HBOSDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..443555154 --- /dev/null +++ b/v0.20.3/models/HBOSDetector_OutlierDetectionPython/index.html @@ -0,0 +1,4 @@ + +HBOSDetector · MLJ diff --git a/v0.20.3/models/HDBSCAN_MLJScikitLearnInterface/index.html b/v0.20.3/models/HDBSCAN_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..66b8599b3 --- /dev/null +++ b/v0.20.3/models/HDBSCAN_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +HDBSCAN · MLJ

      HDBSCAN

      HDBSCAN

      A model type for constructing an HDBSCAN clusterer, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      HDBSCAN = @load HDBSCAN pkg=MLJScikitLearnInterface

      Do model = HDBSCAN() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in HDBSCAN(min_cluster_size=...).

      Hierarchical Density-Based Spatial Clustering of Applications with Noise. Performs DBSCAN over varying epsilon values and integrates the result to find a clustering that gives the best stability over epsilon. This allows HDBSCAN to find clusters of varying densities (unlike DBSCAN), and be more robust to parameter selection.

      diff --git a/v0.20.3/models/HierarchicalClustering_Clustering/index.html b/v0.20.3/models/HierarchicalClustering_Clustering/index.html new file mode 100644 index 000000000..9dfc2610b --- /dev/null +++ b/v0.20.3/models/HierarchicalClustering_Clustering/index.html @@ -0,0 +1,18 @@ + +HierarchicalClustering · MLJ

      HierarchicalClustering

      HierarchicalClustering

      A model type for constructing a hierarchical clusterer, based on Clustering.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      HierarchicalClustering = @load HierarchicalClustering pkg=Clustering

      Do model = HierarchicalClustering() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in HierarchicalClustering(linkage=...).

      Hierarchical Clustering is a clustering algorithm that organizes the data in a dendrogram based on distances between groups of points and computes cluster assignments by cutting the dendrogram at a given height. More information is available at the Clustering.jl documentation. Use predict to get cluster assignments. The dendrogram and the dendrogram cutter are accessed from the machine report (see below).

      This is a static implementation, i.e., it does not generalize to new data instances, and there is no training data. For clusterers that do generalize, see KMeans or KMedoids.

      In MLJ or MLJBase, create a machine with

      mach = machine(model)

      Hyper-parameters

      • linkage = :single: linkage method (:single, :average, :complete, :ward, :ward_presquared)
      • metric = SqEuclidean: metric (see Distances.jl for available metrics)
      • branchorder = :r: branchorder (:r, :barjoseph, :optimal)
      • h = nothing: height at which the dendrogram is cut
      • k = 3: number of clusters.

      If both k and h are specified, it is guaranteed that the number of clusters is not less than k and their height is not above h.

      Operations

      • predict(mach, X): return cluster label assignments, as an unordered CategoricalVector. Here X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).

      Report

      After calling predict(mach), the fields of report(mach) are:

      • dendrogram: the dendrogram that was computed when calling predict.
      • cutter: a dendrogram cutter that can be called with a height h or a number of clusters k, to obtain a new assignment of the data points to clusters (see example below).

      Examples

      using MLJ
      +
      +X, labels  = make_moons(400, noise=0.09, rng=1) ## synthetic data with 2 clusters; X
      +
      +HierarchicalClustering = @load HierarchicalClustering pkg=Clustering
      +model = HierarchicalClustering(linkage = :complete)
      +mach = machine(model)
      +
      +## compute and output cluster assignments for observations in `X`:
      +yhat = predict(mach, X)
      +
      +## plot dendrogram:
      +using StatsPlots
      +plot(report(mach).dendrogram)
      +
      +## make new predictions by cutting the dendrogram at another height
      +report(mach).cutter(h = 2.5)
      diff --git a/v0.20.3/models/HistGradientBoostingClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/HistGradientBoostingClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..3ebdd7ec6 --- /dev/null +++ b/v0.20.3/models/HistGradientBoostingClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +HistGradientBoostingClassifier · MLJ

      HistGradientBoostingClassifier

      HistGradientBoostingClassifier

      A model type for constructing a hist gradient boosting classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      HistGradientBoostingClassifier = @load HistGradientBoostingClassifier pkg=MLJScikitLearnInterface

      Do model = HistGradientBoostingClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in HistGradientBoostingClassifier(loss=...).

      This algorithm builds an additive model in a forward stage-wise fashion; it allows for the optimization of arbitrary differentiable loss functions. In each stage n_classes_ regression trees are fit on the negative gradient of the loss function, e.g. binary or multiclass log loss. Binary classification is a special case where only a single regression tree is induced.

      HistGradientBoostingClassifier is a much faster variant of this algorithm for intermediate datasets (n_samples >= 10_000).
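
      A minimal usage sketch on synthetic data; the choice of make_blobs for a Multiclass target is an illustrative assumption, not part of the docstring:

      using MLJ
      +HistGradientBoostingClassifier = @load HistGradientBoostingClassifier pkg=MLJScikitLearnInterface
      +
      +X, y = make_blobs(500, 3)            ## synthetic data with a Multiclass target
      +mach = machine(HistGradientBoostingClassifier(), X, y) |> fit!
      +yhat = predict(mach, X)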

      diff --git a/v0.20.3/models/HistGradientBoostingRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/HistGradientBoostingRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..da9282415 --- /dev/null +++ b/v0.20.3/models/HistGradientBoostingRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +HistGradientBoostingRegressor · MLJ

      HistGradientBoostingRegressor

      HistGradientBoostingRegressor

      A model type for constructing a gradient boosting ensemble regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      HistGradientBoostingRegressor = @load HistGradientBoostingRegressor pkg=MLJScikitLearnInterface

      Do model = HistGradientBoostingRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in HistGradientBoostingRegressor(loss=...).

      This estimator builds an additive model in a forward stage-wise fashion; it allows for the optimization of arbitrary differentiable loss functions. In each stage a regression tree is fit on the negative gradient of the given loss function.

      HistGradientBoostingRegressor is a much faster variant of this algorithm for intermediate datasets (n_samples >= 10_000).
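
      A minimal usage sketch on synthetic data (illustrative only; not from the docstring):

      using MLJ
      +HistGradientBoostingRegressor = @load HistGradientBoostingRegressor pkg=MLJScikitLearnInterface
      +
      +X, y = make_regression(500, 5)
      +mach = machine(HistGradientBoostingRegressor(), X, y) |> fit!
      +predict(mach, X)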

      diff --git a/v0.20.3/models/HuberRegressor_MLJLinearModels/index.html b/v0.20.3/models/HuberRegressor_MLJLinearModels/index.html new file mode 100644 index 000000000..d16a1eea0 --- /dev/null +++ b/v0.20.3/models/HuberRegressor_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +HuberRegressor · MLJ

      HuberRegressor

      HuberRegressor

      A model type for constructing a huber regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      HuberRegressor = @load HuberRegressor pkg=MLJLinearModels

      Do model = HuberRegressor() to construct an instance with default hyper-parameters.

      This model coincides with RobustRegressor, with the exception that the robust loss, rho, is fixed to HuberRho(delta), where delta is a new hyperparameter.

      Different solver options exist, as indicated under "Hyperparameters" below.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      • delta::Real: parameterizes the HuberRho function (radius of the ball within which the loss is a quadratic loss) Default: 0.5

      • lambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. Default: 1.0

      • gamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0

      • penalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2

      • fit_intercept::Bool: whether to fit the intercept or not. Default: true

      • penalize_intercept::Bool: whether to penalize the intercept. Default: false

      • scale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true

      • solver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, IWLSCG, Newton, NewtonCG, if penalty = :l2, and ProxGrad otherwise.

        If solver = nothing (default) then LBFGS() is used, if penalty = :l2, and otherwise ProxGrad(accel=true) (FISTA) is used.

        Solver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) Default: nothing

      Example

      using MLJ
      +X, y = make_regression()
      +mach = fit!(machine(HuberRegressor(), X, y))
      +predict(mach, X)
      +fitted_params(mach)

      See also RobustRegressor, QuantileRegressor.

      diff --git a/v0.20.3/models/HuberRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/HuberRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..81fe537ce --- /dev/null +++ b/v0.20.3/models/HuberRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +HuberRegressor · MLJ

      HuberRegressor

      HuberRegressor

      A model type for constructing a Huber regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      HuberRegressor = @load HuberRegressor pkg=MLJScikitLearnInterface

      Do model = HuberRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in HuberRegressor(epsilon=...).

      Hyper-parameters

      • epsilon = 1.35
      • max_iter = 100
      • alpha = 0.0001
      • warm_start = false
      • fit_intercept = true
      • tol = 1.0e-5
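
      A minimal usage sketch, mirroring the MLJLinearModels HuberRegressor example above; the data are synthetic and the choice epsilon=1.5 is purely illustrative:

      using MLJ
      +HuberRegressor = @load HuberRegressor pkg=MLJScikitLearnInterface
      +
      +X, y = make_regression(100, 3)
      +mach = machine(HuberRegressor(epsilon=1.5), X, y) |> fit!
      +predict(mach, X)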
      diff --git a/v0.20.3/models/ICA_MultivariateStats/index.html b/v0.20.3/models/ICA_MultivariateStats/index.html new file mode 100644 index 000000000..20028685b --- /dev/null +++ b/v0.20.3/models/ICA_MultivariateStats/index.html @@ -0,0 +1,31 @@ + +ICA · MLJ

      ICA

      ICA

      A model type for constructing an independent component analysis model, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ICA = @load ICA pkg=MultivariateStats

      Do model = ICA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ICA(outdim=...).

      Independent component analysis is a computational technique for separating a multivariate signal into additive subcomponents, with the assumption that the subcomponents are non-Gaussian and independent from each other.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • outdim::Int=0: The number of independent components to recover, set automatically if 0.
      • alg::Symbol=:fastica: The algorithm to use (only :fastica is supported at the moment).
      • fun::Symbol=:tanh: The approximate neg-entropy function, one of :tanh, :gaus.
      • do_whiten::Bool=true: Whether or not to perform pre-whitening.
      • maxiter::Int=100: The maximum number of iterations.
      • tol::Real=1e-6: The convergence tolerance for change in the unmixing matrix W.
      • mean::Union{Nothing, Real, Vector{Float64}}=nothing: mean to use, if nothing (default) centering is computed and applied, if zero, no centering; otherwise a vector of means can be passed.
      • winit::Union{Nothing,Matrix{<:Real}}=nothing: Initial guess for the unmixing matrix W: either an empty matrix (for random initialization of W), a matrix of size k × k (if do_whiten is true), or a matrix of size m × k (if do_whiten is false). Here m is the number of components (columns) of the input and k is outdim.

      Operations

      • transform(mach, Xnew): Return the component-separated version of input Xnew, which should have the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • projection: The estimated component matrix.
      • mean: The estimated mean vector.

      Report

      The fields of report(mach) are:

      • indim: Dimension (number of columns) of the training data and new data to be transformed.
      • outdim: Dimension of transformed data.
      • mean: The mean of the untransformed training data, of length indim.

      Examples

      using MLJ
      +
      +ICA = @load ICA pkg=MultivariateStats
      +
      +times = range(0, 8, length=2000)
      +
      +sine_wave = sin.(2*times)
      +square_wave = sign.(sin.(3*times))
      +sawtooth_wave = map(t -> mod(2t, 2) - 1, times)
      +signals = hcat(sine_wave, square_wave, sawtooth_wave)
      +noisy_signals = signals + 0.2*randn(size(signals))
      +
      +mixing_matrix = [ 1 1 1; 0.5 2 1; 1.5 1 2]
      +X = MLJ.table(noisy_signals*mixing_matrix)
      +
      +model = ICA(outdim = 3, tol=0.1)
      +mach = machine(model, X) |> fit!
      +
      +X_unmixed = transform(mach, X)
      +
      +using Plots
      +
      +plot(X.x1)
      +plot(X.x2)
      +plot(X.x3)
      +
      +plot(X_unmixed.x1)
      +plot(X_unmixed.x2)
      +plot(X_unmixed.x3)
      +

      See also PCA, KernelPCA, FactorAnalysis, PPCA

      diff --git a/v0.20.3/models/IForestDetector_OutlierDetectionPython/index.html b/v0.20.3/models/IForestDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..8318cd8bf --- /dev/null +++ b/v0.20.3/models/IForestDetector_OutlierDetectionPython/index.html @@ -0,0 +1,8 @@ + +IForestDetector · MLJ diff --git a/v0.20.3/models/INNEDetector_OutlierDetectionPython/index.html b/v0.20.3/models/INNEDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..7ba9299d9 --- /dev/null +++ b/v0.20.3/models/INNEDetector_OutlierDetectionPython/index.html @@ -0,0 +1,4 @@ + +INNEDetector · MLJ diff --git a/v0.20.3/models/ImageClassifier_MLJFlux/index.html b/v0.20.3/models/ImageClassifier_MLJFlux/index.html new file mode 100644 index 000000000..b5cff40eb --- /dev/null +++ b/v0.20.3/models/ImageClassifier_MLJFlux/index.html @@ -0,0 +1,48 @@ + +ImageClassifier · MLJ

      ImageClassifier

      ImageClassifier

      A model type for constructing an image classifier, based on MLJFlux.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ImageClassifier = @load ImageClassifier pkg=MLJFlux

      Do model = ImageClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ImageClassifier(builder=...).

      ImageClassifier classifies images using a neural network adapted to the type of images provided (color or gray scale). Predictions are probabilistic. Users provide a recipe for constructing the network, based on properties of the image encountered, by specifying an appropriate builder. See MLJFlux documentation for more on builders.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any AbstractVector of images with ColorImage or GrayImage scitype; check the scitype with scitype(X) and refer to ScientificTypes.jl documentation on coercing typical image formats into an appropriate type.
      • y is the target, which can be any AbstractVector whose element scitype is Multiclass; check the scitype with scitype(y).

      Train the machine with fit!(mach, rows=...).

      Hyper-parameters

      • builder: An MLJFlux builder that constructs the neural network. The fallback builds a depth-16 VGG architecture adapted to the image size and number of target classes, with no batch normalization; see the Metalhead.jl documentation for details. See the example below for a user-specified builder. A convenience macro @builder is also available. See also finaliser below.

      • optimiser::Flux.Adam(): A Flux.Optimise optimiser. The optimiser performs the updating of the weights of the network. For further reference, see the Flux optimiser documentation. To choose a learning rate (the update rate of the optimizer), a good rule of thumb is to start out at 10e-3, and tune using powers of 10 between 1 and 1e-7.

      • loss=Flux.crossentropy: The loss function which the network will optimize. Should be a function which can be called in the form loss(yhat, y). Possible loss functions are listed in the Flux loss function documentation. For a classification task, the most natural loss functions are:

        • Flux.crossentropy: Standard multiclass classification loss, also known as the log loss.
        • Flux.logitcrossentropy: Mathematically equal to crossentropy, but numerically more stable than finalising the outputs with softmax and then calculating crossentropy. You will need to specify finaliser=identity to remove MLJFlux's default softmax finaliser, and understand that the output of predict is then unnormalized (no longer probabilistic).
        • Flux.tversky_loss: Used with imbalanced data to give more weight to false negatives.
        • Flux.focal_loss: Used with highly imbalanced data. Weights harder examples more than easier examples.

        Currently MLJ measures are not supported values of loss.

      • epochs::Int=10: The duration of training, in epochs. Typically, one epoch represents one pass through the complete training dataset.

      • batch_size::Int=1: the batch size to be used for training, representing the number of samples per update of the network weights. Typically, batch size is between 8 and 512. Increasing batch size may accelerate training if acceleration=CUDALibs() and a GPU is available.

      • lambda::Float64=0: The strength of the weight regularization penalty. Can be any value in the range [0, ∞).

      • alpha::Float64=0: The L2/L1 mix of regularization, in the range [0, 1]. A value of 0 represents L2 regularization, and a value of 1 represents L1 regularization.

      • rng::Union{AbstractRNG, Int64}: The random number generator or seed used during training.

      • optimizer_changes_trigger_retraining::Bool=false: Defines what happens when re-fitting a machine if the associated optimiser has changed. If true, the associated machine will retrain from scratch on fit! call, otherwise it will not.

      • acceleration::AbstractResource=CPU1(): Defines on what hardware training is done. For Training on GPU, use CUDALibs().

      • finaliser=Flux.softmax: The final activation function of the neural network (applied after the network defined by builder). Defaults to Flux.softmax.

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above. Predictions are probabilistic but uncalibrated.
      • predict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • chain: The trained "chain" (Flux.jl model), namely the series of layers, functions, and activations which make up the neural network. This includes the final layer specified by finaliser (eg, softmax).

      Report

      The fields of report(mach) are:

      • training_losses: A vector of training losses (penalised if lambda != 0) in historical order, of length epochs + 1. The first element is the pre-training loss.

      Examples

      In this example we use MLJFlux and a custom builder to classify the MNIST image dataset.

      using MLJ
      +using Flux
      +import MLJFlux
      +import MLJIteration ## for `skip` control

      First we want to download the MNIST dataset, and unpack into images and labels:

      import MLDatasets: MNIST
      +data = MNIST(split=:train)
      +images, labels = data.features, data.targets

      In MLJ, integers cannot be used for encoding categorical data, so we must coerce them into the Multiclass scitype:

      labels = coerce(labels, Multiclass);

      Above images is a single array but MLJFlux requires the images to be a vector of individual image arrays:

      images = coerce(images, GrayImage);
      +images[1]

      We start by defining a suitable builder object. This is a recipe for building the neural network. Our builder will work for images of any (constant) size, whether they be color or black and white (ie, single or multi-channel). The architecture always consists of six alternating convolution and max-pool layers, and a final dense layer; the filter size and the number of channels after each convolution layer is customizable.

      import MLJFlux
      +
      +struct MyConvBuilder
      +    filter_size::Int
      +    channels1::Int
      +    channels2::Int
      +    channels3::Int
      +end
      +
      +make2d(x::AbstractArray) = reshape(x, :, size(x)[end])
      +
      +function MLJFlux.build(b::MyConvBuilder, rng, n_in, n_out, n_channels)
      +    k, c1, c2, c3 = b.filter_size, b.channels1, b.channels2, b.channels3
      +    mod(k, 2) == 1 || error("`filter_size` must be odd. ")
      +    p = div(k - 1, 2) ## padding to preserve image size
      +    init = Flux.glorot_uniform(rng)
      +    front = Chain(
      +        Conv((k, k), n_channels => c1, pad=(p, p), relu, init=init),
      +        MaxPool((2, 2)),
      +        Conv((k, k), c1 => c2, pad=(p, p), relu, init=init),
      +        MaxPool((2, 2)),
      +        Conv((k, k), c2 => c3, pad=(p, p), relu, init=init),
      +        MaxPool((2 ,2)),
      +        make2d)
      +    d = Flux.outputsize(front, (n_in..., n_channels, 1)) |> first
      +    return Chain(front, Dense(d, n_out, init=init))
      +end

      It is important to note that in our build function, there is no final softmax. This is applied by default in all MLJFlux classifiers (override this using the finaliser hyperparameter).

      Now that our builder is defined, we can instantiate the actual MLJFlux model. If you have a GPU, you can substitute in acceleration=CUDALibs() below to speed up training.

      ImageClassifier = @load ImageClassifier pkg=MLJFlux
      +clf = ImageClassifier(builder=MyConvBuilder(3, 16, 32, 32),
      +                      batch_size=50,
      +                      epochs=10,
      +                      rng=123)

      You can add Flux options such as optimiser and loss in the snippet above. Currently, loss must be a flux-compatible loss, and not an MLJ measure.

      Next, we can bind the model with the data in a machine, and train using the first 500 images:

      mach = machine(clf, images, labels);
      +fit!(mach, rows=1:500, verbosity=2);
      +report(mach)
      +chain = fitted_params(mach)
      +Flux.params(chain)[2]

      We can tack on 20 more epochs by modifying the epochs field, and iteratively fit some more:

      clf.epochs = clf.epochs + 20
      +fit!(mach, rows=1:500, verbosity=2);

      We can also make predictions and calculate an out-of-sample loss estimate, using any MLJ measure (loss/score):

      predicted_labels = predict(mach, rows=501:1000);
      +cross_entropy(predicted_labels, labels[501:1000]) |> mean

      The preceding fit!/predict/evaluate workflow can be alternatively executed as follows:

      evaluate!(mach,
      +          resampling=Holdout(fraction_train=0.5),
      +          measure=cross_entropy,
      +          rows=1:1000,
      +          verbosity=0)

      See also NeuralNetworkClassifier.

      diff --git a/v0.20.3/models/InteractionTransformer_MLJModels/index.html b/v0.20.3/models/InteractionTransformer_MLJModels/index.html new file mode 100644 index 000000000..630cda79b --- /dev/null +++ b/v0.20.3/models/InteractionTransformer_MLJModels/index.html @@ -0,0 +1,32 @@ + +InteractionTransformer · MLJ

      InteractionTransformer

      InteractionTransformer

      A model type for constructing an interaction transformer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      InteractionTransformer = @load InteractionTransformer pkg=MLJModels

      Do model = InteractionTransformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in InteractionTransformer(order=...).

      Generates all polynomial interaction terms up to the given order for the subset of chosen columns. Any column that contains elements with scitype <:Infinite is a valid basis to generate interactions. If features is not specified, all such columns with scitype <:Infinite in the table are used as a basis.

      In MLJ or MLJBase, you can transform features X with the single call

      transform(machine(model), X)

      See also the example below.

      Hyper-parameters

      • order: Maximum order of interactions to be generated.
      • features: Restricts interaction generation to those columns

      Operations

      • transform(machine(model), X): Generates polynomial interaction terms out of table X using the hyper-parameters specified in model.

      Example

      using MLJ
      +
      +X = (
      +    A = [1, 2, 3],
      +    B = [4, 5, 6],
      +    C = [7, 8, 9],
      +    D = ["x₁", "x₂", "x₃"]
      +)
      +it = InteractionTransformer(order=3)
      +mach = machine(it)
      +
      +julia> transform(mach, X)
      +(A = [1, 2, 3],
      + B = [4, 5, 6],
      + C = [7, 8, 9],
      + D = ["x₁", "x₂", "x₃"],
      + A_B = [4, 10, 18],
      + A_C = [7, 16, 27],
      + B_C = [28, 40, 54],
      + A_B_C = [28, 80, 162],)
      +
      +it = InteractionTransformer(order=2, features=[:A, :B])
      +mach = machine(it)
      +
      +julia> transform(mach, X)
      +(A = [1, 2, 3],
      + B = [4, 5, 6],
      + C = [7, 8, 9],
      + D = ["x₁", "x₂", "x₃"],
      + A_B = [4, 10, 18],)
      +
      diff --git a/v0.20.3/models/KDEDetector_OutlierDetectionPython/index.html b/v0.20.3/models/KDEDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..a9b4dd8aa --- /dev/null +++ b/v0.20.3/models/KDEDetector_OutlierDetectionPython/index.html @@ -0,0 +1,6 @@ + +KDEDetector · MLJ diff --git a/v0.20.3/models/KMeansClusterer_BetaML/index.html b/v0.20.3/models/KMeansClusterer_BetaML/index.html new file mode 100644 index 000000000..a6075e277 --- /dev/null +++ b/v0.20.3/models/KMeansClusterer_BetaML/index.html @@ -0,0 +1,32 @@ + +KMeansClusterer · MLJ

      KMeansClusterer

      mutable struct KMeansClusterer <: MLJModelInterface.Unsupervised

      The classical KMeansClusterer clustering algorithm, from the Beta Machine Learning Toolkit (BetaML).

      Parameters:

      • n_classes::Int64: Number of classes to discriminate the data [def: 3]

• dist::Function: Function to employ as distance. Defaults to the Euclidean distance. Can be one of the predefined distances (l1_distance, l2_distance, l2squared_distance, cosine_distance), any user-defined function accepting two vectors and returning a scalar, or an anonymous function with the same characteristics. Note that, contrary to KMedoidsClusterer, the KMeansClusterer algorithm is not guaranteed to converge with distances other than the Euclidean one.

      • initialisation_strategy::String: The computation method of the vector of the initial representatives. One of the following:

        • "random": randomly in the X space
        • "grid": using a grid approach
        • "shuffle": selecting randomly within the available points [default]
        • "given": using a provided set of initial representatives provided in the initial_representatives parameter
      • initial_representatives::Union{Nothing, Matrix{Float64}}: Provided (K x D) matrix of initial representatives (useful only with initialisation_strategy="given") [default: nothing]

• rng::Random.AbstractRNG: Random Number Generator [default: Random.GLOBAL_RNG]

      Notes:

      • data must be numerical
      • online fitting (re-fitting with new data) is supported

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load KMeansClusterer pkg = "BetaML" verbosity=0
      +BetaML.Clustering.KMeansClusterer
      +
      +julia> model       = modelType()
      +KMeansClusterer(
      +  n_classes = 3, 
      +  dist = BetaML.Clustering.var"#34#36"(), 
      +  initialisation_strategy = "shuffle", 
      +  initial_representatives = nothing, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(KMeansClusterer(n_classes = 3, …), …).
      +
      +julia> classes_est = predict(mach, X);
      +
      +julia> hcat(y,classes_est)
      +150×2 CategoricalArrays.CategoricalArray{Union{Int64, String},2,UInt32}:
      + "setosa"     2
      + "setosa"     2
      + "setosa"     2
      + ⋮            
      + "virginica"  3
      + "virginica"  3
      + "virginica"  1
      diff --git a/v0.20.3/models/KMeans_Clustering/index.html b/v0.20.3/models/KMeans_Clustering/index.html new file mode 100644 index 000000000..23821738b --- /dev/null +++ b/v0.20.3/models/KMeans_Clustering/index.html @@ -0,0 +1,20 @@ + +KMeans · MLJ

      KMeans

      KMeans

      A model type for constructing a K-means clusterer, based on Clustering.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      KMeans = @load KMeans pkg=Clustering

      Do model = KMeans() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KMeans(k=...).

      K-means is a classical method for clustering or vector quantization. It produces a fixed number of clusters, each associated with a center (also known as a prototype), and each data point is assigned to a cluster with the nearest center.

      From a mathematical standpoint, K-means is a coordinate descent algorithm that solves the following optimization problem:

$$\text{minimize} \ \sum_{i=1}^n \| \mathbf{x}_i - \boldsymbol{\mu}_{z_i} \|^2 \quad \text{w.r.t.} \ (\boldsymbol{\mu}, z)$$

Here, $\boldsymbol{\mu}_k$ is the center of the $k$-th cluster, and $z_i$ is the index of the cluster for the $i$-th point $\mathbf{x}_i$.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • k=3: The number of centroids to use in clustering.

      • metric::SemiMetric=Distances.SqEuclidean: The metric used to calculate the clustering. Must have type PreMetric from Distances.jl.

      • init = :kmpp: One of the following options to indicate how cluster seeds should be initialized:

        • :kmpp: KMeans++
        • :kmenc: K-medoids initialization based on centrality
        • :rand: random
        • an instance of Clustering.SeedingAlgorithm from Clustering.jl
        • an integer vector of length k that provides the indices of points to use as initial cluster centers.

        See documentation of Clustering.jl.

      Operations

• predict(mach, Xnew): return cluster label assignments, given new features Xnew having the same scitype as X above.
      • transform(mach, Xnew): instead return the mean pairwise distances from new samples to the cluster centers.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • centers: The coordinates of the cluster centers.

      Report

      The fields of report(mach) are:

      • assignments: The cluster assignments of each point in the training data.
      • cluster_labels: The labels assigned to each cluster.

      Examples

      using MLJ
      +KMeans = @load KMeans pkg=Clustering
      +
      +table = load_iris()
      +y, X = unpack(table, ==(:target), rng=123)
      +model = KMeans(k=3)
      +mach = machine(model, X) |> fit!
      +
      +yhat = predict(mach, X)
      +@assert yhat == report(mach).assignments
      +
      +compare = zip(yhat, y) |> collect;
      +compare[1:8] ## clusters align with classes
      +
      +center_dists = transform(mach, fitted_params(mach).centers')
      +
      +@assert center_dists[1][1] == 0.0
      +@assert center_dists[2][2] == 0.0
      +@assert center_dists[3][3] == 0.0

      See also KMedoids

      diff --git a/v0.20.3/models/KMeans_MLJScikitLearnInterface/index.html b/v0.20.3/models/KMeans_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..7534b5a67 --- /dev/null +++ b/v0.20.3/models/KMeans_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +KMeans · MLJ

      KMeans

      KMeans

A model type for constructing a K-means clustering model, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      KMeans = @load KMeans pkg=MLJScikitLearnInterface

      Do model = KMeans() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KMeans(n_clusters=...).

      K-Means algorithm: find K centroids corresponding to K clusters in the data.
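
Example

A minimal usage sketch (not from the upstream docstring), assuming this wrapper follows the usual MLJ unsupervised workflow, with predict returning cluster assignments as for the Clustering.jl KMeans:

using MLJ
+KMeans = @load KMeans pkg=MLJScikitLearnInterface
+
+X, y = @load_iris; ## y is ignored; only the Continuous features are used
+model = KMeans(n_clusters=3)
+mach = machine(model, X) |> fit!
+
+yhat = predict(mach, X) ## cluster assignments (assumed)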

      diff --git a/v0.20.3/models/KMeans_ParallelKMeans/index.html b/v0.20.3/models/KMeans_ParallelKMeans/index.html new file mode 100644 index 000000000..f916e9a0d --- /dev/null +++ b/v0.20.3/models/KMeans_ParallelKMeans/index.html @@ -0,0 +1,2 @@ + +KMeans · MLJ diff --git a/v0.20.3/models/KMedoidsClusterer_BetaML/index.html b/v0.20.3/models/KMedoidsClusterer_BetaML/index.html new file mode 100644 index 000000000..18d9f677b --- /dev/null +++ b/v0.20.3/models/KMedoidsClusterer_BetaML/index.html @@ -0,0 +1,32 @@ + +KMedoidsClusterer · MLJ

      KMedoidsClusterer

      mutable struct KMedoidsClusterer <: MLJModelInterface.Unsupervised

      Parameters:

      • n_classes::Int64: Number of classes to discriminate the data [def: 3]

• dist::Function: Function to employ as distance. Defaults to the Euclidean distance. Can be one of the predefined distances (l1_distance, l2_distance, l2squared_distance, cosine_distance), any user-defined function accepting two vectors and returning a scalar, or an anonymous function with the same characteristics.

      • initialisation_strategy::String: The computation method of the vector of the initial representatives. One of the following:

        • "random": randomly in the X space
        • "grid": using a grid approach
        • "shuffle": selecting randomly within the available points [default]
        • "given": using a provided set of initial representatives provided in the initial_representatives parameter
      • initial_representatives::Union{Nothing, Matrix{Float64}}: Provided (K x D) matrix of initial representatives (useful only with initialisation_strategy="given") [default: nothing]

• rng::Random.AbstractRNG: Random Number Generator [default: Random.GLOBAL_RNG]

      The K-medoids clustering algorithm with customisable distance function, from the Beta Machine Learning Toolkit (BetaML).

Similar to K-means, but the "representatives" (the medoids) are guaranteed to be one of the training points. The algorithm works with arbitrary distance measures.

      Notes:

      • data must be numerical
      • online fitting (re-fitting with new data) is supported

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load KMedoidsClusterer pkg = "BetaML" verbosity=0
      +BetaML.Clustering.KMedoidsClusterer
      +
      +julia> model       = modelType()
      +KMedoidsClusterer(
      +  n_classes = 3, 
      +  dist = BetaML.Clustering.var"#39#41"(), 
      +  initialisation_strategy = "shuffle", 
      +  initial_representatives = nothing, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(KMedoidsClusterer(n_classes = 3, …), …).
      +
      +julia> classes_est = predict(mach, X);
      +
      +julia> hcat(y,classes_est)
      +150×2 CategoricalArrays.CategoricalArray{Union{Int64, String},2,UInt32}:
      + "setosa"     3
      + "setosa"     3
      + "setosa"     3
      + ⋮            
      + "virginica"  1
      + "virginica"  1
      + "virginica"  2
      diff --git a/v0.20.3/models/KMedoids_Clustering/index.html b/v0.20.3/models/KMedoids_Clustering/index.html new file mode 100644 index 000000000..48fd9a979 --- /dev/null +++ b/v0.20.3/models/KMedoids_Clustering/index.html @@ -0,0 +1,20 @@ + +KMedoids · MLJ

      KMedoids

      KMedoids

      A model type for constructing a K-medoids clusterer, based on Clustering.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      KMedoids = @load KMedoids pkg=Clustering

      Do model = KMedoids() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KMedoids(k=...).

      K-medoids is a clustering algorithm that works by finding $k$ data points (called medoids) such that the total distance between each data point and the closest medoid is minimal.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • k=3: The number of centroids to use in clustering.

      • metric::SemiMetric=Distances.SqEuclidean: The metric used to calculate the clustering. Must have type PreMetric from Distances.jl.

• init (defaults to :kmpp): how medoids should be initialized; can be one of the following:

        • :kmpp: KMeans++
        • :kmenc: K-medoids initialization based on centrality
        • :rand: random
        • an instance of Clustering.SeedingAlgorithm from Clustering.jl
        • an integer vector of length k that provides the indices of points to use as initial medoids.

        See documentation of Clustering.jl.

      Operations

• predict(mach, Xnew): return cluster label assignments, given new features Xnew having the same scitype as X above.
      • transform(mach, Xnew): instead return the mean pairwise distances from new samples to the cluster centers.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • medoids: The coordinates of the cluster medoids.

      Report

      The fields of report(mach) are:

      • assignments: The cluster assignments of each point in the training data.
      • cluster_labels: The labels assigned to each cluster.

      Examples

      using MLJ
      +KMedoids = @load KMedoids pkg=Clustering
      +
      +table = load_iris()
      +y, X = unpack(table, ==(:target), rng=123)
      +model = KMedoids(k=3)
      +mach = machine(model, X) |> fit!
      +
      +yhat = predict(mach, X)
      +@assert yhat == report(mach).assignments
      +
      +compare = zip(yhat, y) |> collect;
      +compare[1:8] ## clusters align with classes
      +
      +center_dists = transform(mach, fitted_params(mach).medoids')
      +
      +@assert center_dists[1][1] == 0.0
      +@assert center_dists[2][2] == 0.0
      +@assert center_dists[3][3] == 0.0

      See also KMeans

      diff --git a/v0.20.3/models/KNNClassifier_NearestNeighborModels/index.html b/v0.20.3/models/KNNClassifier_NearestNeighborModels/index.html new file mode 100644 index 000000000..530c403ca --- /dev/null +++ b/v0.20.3/models/KNNClassifier_NearestNeighborModels/index.html @@ -0,0 +1,12 @@ + +KNNClassifier · MLJ

      KNNClassifier

      KNNClassifier

      A model type for constructing a K-nearest neighbor classifier, based on NearestNeighborModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      KNNClassifier = @load KNNClassifier pkg=NearestNeighborModels

      Do model = KNNClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KNNClassifier(K=...).

KNNClassifier implements the K-nearest neighbors classifier, a non-parametric algorithm that predicts a discrete class distribution associated with a new point by taking a vote over the classes of the k-nearest points. Each neighbor's vote is assigned a weight based on the proximity of that neighbor to the test point, according to a specified distance metric.

For more information about the weighting kernels, see the paper by Geler et al., Comparison of different weighting schemes for the kNN classifier on time-series data.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      OR

      mach = machine(model, X, y, w)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any AbstractVector whose element scitype is <:Finite (<:Multiclass or <:OrderedFactor will do); check the scitype with scitype(y)
• w is the observation weights, which can be either nothing (default) or an AbstractVector whose element scitype is Count or Continuous. This is distinct from the weights kernel, which is a model hyperparameter; see below.

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • K::Int=5 : number of neighbors
      • algorithm::Symbol = :kdtree : one of (:kdtree, :brutetree, :balltree)
      • metric::Metric = Euclidean() : any Metric from Distances.jl for the distance between points. For algorithm = :kdtree only metrics which are instances of Union{Distances.Chebyshev, Distances.Cityblock, Distances.Euclidean, Distances.Minkowski, Distances.WeightedCityblock, Distances.WeightedEuclidean, Distances.WeightedMinkowski} are supported.
• leafsize::Int = 10 : determines the number of points at which to stop splitting the tree. This option is ignored and always taken as 0 for algorithm = :brutetree, since brutetree isn't actually a tree.
      • reorder::Bool = true : if true then points which are close in distance are placed close in memory. In this case, a copy of the original data will be made so that the original data is left unmodified. Setting this to true can significantly improve performance of the specified algorithm (except :brutetree). This option is ignored and always taken as false for algorithm = :brutetree.
      • weights::KNNKernel=Uniform() : kernel used in assigning weights to the k-nearest neighbors for each observation. An instance of one of the types in list_kernels(). User-defined weighting functions can be passed by wrapping the function in a UserDefinedKernel kernel (do ?NearestNeighborModels.UserDefinedKernel for more info). If observation weights w are passed during machine construction then the weight assigned to each neighbor vote is the product of the kernel generated weight for that neighbor and the corresponding observation weight.

      Operations

      • predict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above. Predictions are probabilistic but uncalibrated.
      • predict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

• tree: An instance of either KDTree, BruteTree or BallTree, depending on the value of the algorithm hyperparameter (see the hyper-parameters section above). These are data structures that store the training data with a view to making nearest neighbor searches on test data points quicker.

      Examples

      using MLJ
      +KNNClassifier = @load KNNClassifier pkg=NearestNeighborModels
      +X, y = @load_crabs; ## a table and a vector from the crabs dataset
      +## view possible kernels
      +NearestNeighborModels.list_kernels()
      +## KNNClassifier instantiation
      +model = KNNClassifier(weights = NearestNeighborModels.Inverse())
      +mach = machine(model, X, y) |> fit! ## wrap model and required data in an MLJ machine and fit
      +y_hat = predict(mach, X)
      +labels = predict_mode(mach, X)
      +

      See also MultitargetKNNClassifier

      diff --git a/v0.20.3/models/KNNDetector_OutlierDetectionNeighbors/index.html b/v0.20.3/models/KNNDetector_OutlierDetectionNeighbors/index.html new file mode 100644 index 000000000..0b316d02c --- /dev/null +++ b/v0.20.3/models/KNNDetector_OutlierDetectionNeighbors/index.html @@ -0,0 +1,11 @@ + +KNNDetector · MLJ

      KNNDetector

      KNNDetector(k=5,
      +            metric=Euclidean,
      +            algorithm=:kdtree,
      +            leafsize=10,
      +            reorder=true,
      +            reduction=:maximum)

      Calculate the anomaly score of an instance based on the distance to its k-nearest neighbors.

      Parameters

      k::Integer

      Number of neighbors (must be greater than 0).

      metric::Metric

      This is one of the Metric types defined in the Distances.jl package. It is possible to define your own metrics by creating new types that are subtypes of Metric.

      algorithm::Symbol

      One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.

      static::Union{Bool, Symbol}

      One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.

      leafsize::Int

Determines at what number of points to stop splitting the tree further. There is a trade-off between traversing the tree and having to evaluate the metric function for an increasing number of points.

      reorder::Bool

While building the tree, points that are close in distance are placed close in memory, since this helps with cache locality. In this case, a copy of the original data is made so that the original data is left unmodified. This can have a significant impact on performance and is set to true by default.

      parallel::Bool

      Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. Note: fit is not parallel.

      reduction::Symbol

      One of (:maximum, :median, :mean). (reduction=:maximum) was proposed by [1]. Angiulli et al. [2] proposed sum to reduce the distances, but mean has been implemented for numerical stability.

      Examples

      using OutlierDetection: KNNDetector, fit, transform
      +detector = KNNDetector()
      +X = rand(10, 100)
      +model, result = fit(detector, X; verbosity=0)
      +test_scores = transform(detector, model, X)

      References

      [1] Ramaswamy, Sridhar; Rastogi, Rajeev; Shim, Kyuseok (2000): Efficient Algorithms for Mining Outliers from Large Data Sets.

      [2] Angiulli, Fabrizio; Pizzuti, Clara (2002): Fast Outlier Detection in High Dimensional Spaces.

      diff --git a/v0.20.3/models/KNNDetector_OutlierDetectionPython/index.html b/v0.20.3/models/KNNDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..02b7a7fc6 --- /dev/null +++ b/v0.20.3/models/KNNDetector_OutlierDetectionPython/index.html @@ -0,0 +1,10 @@ + +KNNDetector · MLJ diff --git a/v0.20.3/models/KNNRegressor_NearestNeighborModels/index.html b/v0.20.3/models/KNNRegressor_NearestNeighborModels/index.html new file mode 100644 index 000000000..a8a3bbc2e --- /dev/null +++ b/v0.20.3/models/KNNRegressor_NearestNeighborModels/index.html @@ -0,0 +1,10 @@ + +KNNRegressor · MLJ

      KNNRegressor

      KNNRegressor

      A model type for constructing a K-nearest neighbor regressor, based on NearestNeighborModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      KNNRegressor = @load KNNRegressor pkg=NearestNeighborModels

      Do model = KNNRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KNNRegressor(K=...).

KNNRegressor implements the K-nearest neighbors regressor, a non-parametric algorithm that predicts the response associated with a new point by taking a weighted average of the responses of the K-nearest points.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      OR

      mach = machine(model, X, y, w)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any table of responses whose element scitype is Continuous; check the scitype with scitype(y).
• w is the observation weights, which can be either nothing (default) or an AbstractVector whose element scitype is Count or Continuous. This is distinct from the weights kernel, which is a model hyperparameter; see below.

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • K::Int=5 : number of neighbors
      • algorithm::Symbol = :kdtree : one of (:kdtree, :brutetree, :balltree)
      • metric::Metric = Euclidean() : any Metric from Distances.jl for the distance between points. For algorithm = :kdtree only metrics which are instances of Union{Distances.Chebyshev, Distances.Cityblock, Distances.Euclidean, Distances.Minkowski, Distances.WeightedCityblock, Distances.WeightedEuclidean, Distances.WeightedMinkowski} are supported.
• leafsize::Int = 10 : determines the number of points at which to stop splitting the tree. This option is ignored and always taken as 0 for algorithm = :brutetree, since brutetree isn't actually a tree.
      • reorder::Bool = true : if true then points which are close in distance are placed close in memory. In this case, a copy of the original data will be made so that the original data is left unmodified. Setting this to true can significantly improve performance of the specified algorithm (except :brutetree). This option is ignored and always taken as false for algorithm = :brutetree.
      • weights::KNNKernel=Uniform() : kernel used in assigning weights to the k-nearest neighbors for each observation. An instance of one of the types in list_kernels(). User-defined weighting functions can be passed by wrapping the function in a UserDefinedKernel kernel (do ?NearestNeighborModels.UserDefinedKernel for more info). If observation weights w are passed during machine construction then the weight assigned to each neighbor vote is the product of the kernel generated weight for that neighbor and the corresponding observation weight.

      Operations

      • predict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

• tree: An instance of either KDTree, BruteTree or BallTree, depending on the value of the algorithm hyperparameter (see the hyper-parameters section above). These are data structures that store the training data with a view to making nearest neighbor searches on test data points quicker.

      Examples

      using MLJ
      +KNNRegressor = @load KNNRegressor pkg=NearestNeighborModels
+X, y = @load_boston; ## loads the Boston housing dataset from MLJBase
      +## view possible kernels
      +NearestNeighborModels.list_kernels()
+model = KNNRegressor(weights = NearestNeighborModels.Inverse()) ## KNNRegressor instantiation
      +mach = machine(model, X, y) |> fit! ## wrap model and required data in an MLJ machine and fit
      +y_hat = predict(mach, X)
      +

      See also MultitargetKNNRegressor

      diff --git a/v0.20.3/models/KNeighborsClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/KNeighborsClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..36066c7a0 --- /dev/null +++ b/v0.20.3/models/KNeighborsClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +KNeighborsClassifier · MLJ

      KNeighborsClassifier

      KNeighborsClassifier

      A model type for constructing a K-nearest neighbors classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      KNeighborsClassifier = @load KNeighborsClassifier pkg=MLJScikitLearnInterface

      Do model = KNeighborsClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KNeighborsClassifier(n_neighbors=...).

      Hyper-parameters

      • n_neighbors = 5
      • weights = uniform
      • algorithm = auto
      • leaf_size = 30
      • p = 2
      • metric = minkowski
      • metric_params = nothing
      • n_jobs = nothing
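
Example

A minimal usage sketch on the iris data (not from the upstream docstring), assuming the standard MLJ supervised workflow carries over to this wrapper and that predictions are probabilistic, as for the NearestNeighborModels classifier above:

using MLJ
+KNeighborsClassifier = @load KNeighborsClassifier pkg=MLJScikitLearnInterface
+
+X, y = @load_iris
+model = KNeighborsClassifier(n_neighbors=5)
+mach = machine(model, X, y) |> fit!
+
+yhat = predict(mach, X)        ## probabilistic predictions (assumed)
+labels = predict_mode(mach, X) ## point predictions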
      diff --git a/v0.20.3/models/KNeighborsRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/KNeighborsRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..121364314 --- /dev/null +++ b/v0.20.3/models/KNeighborsRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +KNeighborsRegressor · MLJ

      KNeighborsRegressor

      KNeighborsRegressor

      A model type for constructing a K-nearest neighbors regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      KNeighborsRegressor = @load KNeighborsRegressor pkg=MLJScikitLearnInterface

      Do model = KNeighborsRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KNeighborsRegressor(n_neighbors=...).

      Hyper-parameters

      • n_neighbors = 5
      • weights = uniform
      • algorithm = auto
      • leaf_size = 30
      • p = 2
      • metric = minkowski
      • metric_params = nothing
      • n_jobs = nothing
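
Example

A minimal usage sketch on synthetic data (not from the upstream docstring), assuming the standard MLJ supervised workflow applies to this wrapper; make_regression is used as elsewhere in these docs:

using MLJ
+KNeighborsRegressor = @load KNeighborsRegressor pkg=MLJScikitLearnInterface
+
+X, y = make_regression() ## synthetic Continuous features and target
+mach = machine(KNeighborsRegressor(n_neighbors=5), X, y) |> fit!
+yhat = predict(mach, X)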
      diff --git a/v0.20.3/models/KPLSRegressor_PartialLeastSquaresRegressor/index.html b/v0.20.3/models/KPLSRegressor_PartialLeastSquaresRegressor/index.html new file mode 100644 index 000000000..11824162b --- /dev/null +++ b/v0.20.3/models/KPLSRegressor_PartialLeastSquaresRegressor/index.html @@ -0,0 +1,2 @@ + +KPLSRegressor · MLJ diff --git a/v0.20.3/models/KernelPCA_MultivariateStats/index.html b/v0.20.3/models/KernelPCA_MultivariateStats/index.html new file mode 100644 index 000000000..64ed024fc --- /dev/null +++ b/v0.20.3/models/KernelPCA_MultivariateStats/index.html @@ -0,0 +1,16 @@ + +KernelPCA · MLJ

      KernelPCA

      KernelPCA

A model type for constructing a kernel principal component analysis model, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      KernelPCA = @load KernelPCA pkg=MultivariateStats

      Do model = KernelPCA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KernelPCA(maxoutdim=...).

In kernel PCA the linear operations of ordinary principal component analysis are performed in a reproducing kernel Hilbert space.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

• maxoutdim=0: Controls the dimension (number of columns) of the output, outdim. Specifically, outdim = min(n, indim, maxoutdim), where n is the number of observations and indim the input dimension.
• kernel::Function=(x,y)->x'y: The kernel function, which takes two vector arguments x and y and returns a scalar value. Defaults to the dot product of x and y.
• solver::Symbol=:eig: solver to use for the eigenvalues; one of :eig (default, uses LinearAlgebra.eigen) or :eigs (uses Arpack.eigs).
      • inverse::Bool=true: perform calculations needed for inverse transform
      • beta::Real=1.0: strength of the ridge regression that learns the inverse transform when inverse is true.
      • tol::Real=0.0: Convergence tolerance for eigenvalue solver.
      • maxiter::Int=300: maximum number of iterations for eigenvalue solver.

      Operations

      • transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.
• inverse_transform(mach, Xsmall): For a dimension-reduced table Xsmall, such as returned by transform, reconstruct a table, having the same number of columns as the original training data X, that transforms to Xsmall. Mathematically, inverse_transform is a right-inverse for the PCA projection map, whose image is orthogonal to the kernel of that map. In particular, if Xsmall = transform(mach, Xnew), then inverse_transform(Xsmall) is only an approximation to Xnew.

      Fitted parameters

      The fields of fitted_params(mach) are:

• projection: Returns the projection matrix, which has size (indim, outdim), where indim and outdim are the number of features of the input and output respectively.

      Report

      The fields of report(mach) are:

      • indim: Dimension (number of columns) of the training data and new data to be transformed.
      • outdim: Dimension of transformed data.
      • principalvars: The variance of the principal components.

      Examples

      using MLJ
      +using LinearAlgebra
      +
      +KernelPCA = @load KernelPCA pkg=MultivariateStats
      +
      +X, y = @load_iris ## a table and a vector
      +
+function rbf_kernel(length_scale)
+    ## Gaussian (radial basis function) kernel with the given length scale
+    return (x, y) -> exp(-norm(x - y)^2 / (2 * length_scale^2))
+end
      +
      +model = KernelPCA(maxoutdim=2, kernel=rbf_kernel(1))
      +mach = machine(model, X) |> fit!
      +
      +Xproj = transform(mach, X)

      See also PCA, ICA, FactorAnalysis, PPCA

      diff --git a/v0.20.3/models/KernelPerceptronClassifier_BetaML/index.html b/v0.20.3/models/KernelPerceptronClassifier_BetaML/index.html new file mode 100644 index 000000000..cbcf6f010 --- /dev/null +++ b/v0.20.3/models/KernelPerceptronClassifier_BetaML/index.html @@ -0,0 +1,29 @@ + +KernelPerceptronClassifier · MLJ

      KernelPerceptronClassifier

      mutable struct KernelPerceptronClassifier <: MLJModelInterface.Probabilistic

      The kernel perceptron algorithm using one-vs-one for multiclass, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

• kernel::Function: Kernel function to employ. See ?radial_kernel or ?polynomial_kernel (once the BetaML package is loaded) for details, or check ?BetaML.Utils to verify whether other kernels are defined (you can always define your own kernel) [def: radial_kernel]
• epochs::Int64: Maximum number of epochs, i.e. passes through the whole training sample [def: 100]
• initial_errors::Union{Nothing, Vector{Vector{Int64}}}: Initial distribution of the number of errors [def: nothing, i.e. zeros]. If provided, this should be a nModels-length vector of nRecords-length integer vectors, where nModels is computed as (n_classes * (n_classes - 1)) / 2
• shuffle::Bool: Whether to randomly shuffle the data at each iteration (epoch) [def: true]
• rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load KernelPerceptronClassifier pkg = "BetaML"
      +[ Info: For silent loading, specify `verbosity=0`. 
      +import BetaML ✔
      +BetaML.Perceptron.KernelPerceptronClassifier
      +
      +julia> model       = modelType()
      +KernelPerceptronClassifier(
      +  kernel = BetaML.Utils.radial_kernel, 
      +  epochs = 100, 
      +  initial_errors = nothing, 
      +  shuffle = true, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, y);
      +
      +julia> fit!(mach);
      +
      +julia> est_classes = predict(mach, X)
      +150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt8, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>0.665, versicolor=>0.245, virginica=>0.09)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.665, versicolor=>0.245, virginica=>0.09)
      + ⋮
      + UnivariateFinite{Multiclass{3}}(setosa=>0.09, versicolor=>0.245, virginica=>0.665)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.09, versicolor=>0.665, virginica=>0.245)
      diff --git a/v0.20.3/models/LADRegressor_MLJLinearModels/index.html b/v0.20.3/models/LADRegressor_MLJLinearModels/index.html new file mode 100644 index 000000000..8f293e80f --- /dev/null +++ b/v0.20.3/models/LADRegressor_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +LADRegressor · MLJ

      LADRegressor

      LADRegressor

      A model type for constructing a lad regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LADRegressor = @load LADRegressor pkg=MLJLinearModels

      Do model = LADRegressor() to construct an instance with default hyper-parameters.

      Least absolute deviation regression is a linear model with objective function

$$∑ρ(Xθ - y) + n⋅λ|θ|₂² + n⋅γ|θ|₁$$

      where $ρ$ is the absolute loss and $n$ is the number of observations.

      If scale_penalty_with_samples = false the objective function is instead

$$∑ρ(Xθ - y) + λ|θ|₂² + γ|θ|₁$$

      Different solver options exist, as indicated under "Hyperparameters" below.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      See also RobustRegressor.

      Parameters

      • lambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. Default: 1.0

      • gamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0

      • penalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2

      • fit_intercept::Bool: whether to fit the intercept or not. Default: true

      • penalize_intercept::Bool: whether to penalize the intercept. Default: false

      • scale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true

      • solver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, IWLSCG, if penalty = :l2, and ProxGrad otherwise.

        If solver = nothing (default) then LBFGS() is used, if penalty = :l2, and otherwise ProxGrad(accel=true) (FISTA) is used.

        Solver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) Default: nothing

      Example

      using MLJ
      +X, y = make_regression()
      +mach = fit!(machine(LADRegressor(), X, y))
      +predict(mach, X)
      +fitted_params(mach)
      diff --git a/v0.20.3/models/LDA_MultivariateStats/index.html b/v0.20.3/models/LDA_MultivariateStats/index.html new file mode 100644 index 000000000..00e66c507 --- /dev/null +++ b/v0.20.3/models/LDA_MultivariateStats/index.html @@ -0,0 +1,14 @@ + +LDA · MLJ

      LDA

      LDA

      A model type for constructing a linear discriminant analysis model, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LDA = @load LDA pkg=MultivariateStats

      Do model = LDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LDA(method=...).

      Multiclass linear discriminant analysis learns a projection in a space of features to a lower dimensional space, in a way that attempts to preserve as much as possible the degree to which the classes of a discrete target variable can be discriminated. This can be used either for dimension reduction of the features (see transform below) or for probabilistic classification of the target (see predict below).

In the case of prediction, the class probability for a new observation reflects the proximity of that observation to training observations associated with that class, and how far away the observation is from observations associated with other classes. Specifically, the distances, in the transformed (projected) space, of a new observation from the centroid of each target class are computed; the resulting vector of distances, multiplied by minus one, is passed to a softmax function to obtain a class probability prediction. Here "distance" is computed using a user-specified distance function.
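
As an illustrative sketch of that probability calculation (the distances below are made up, not computed by LDA):

d = [1.2, 0.3, 2.5] ## distances of one projected observation to each class centroid
+probs = exp.(-d) ./ sum(exp.(-d)) ## softmax of minus the distances; entries sum to one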

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

• method::Symbol=:gevd: The solver, one of the :gevd or :whiten methods.
      • cov_w::StatsBase.SimpleCovariance(): An estimator for the within-class covariance (used in computing the within-class scatter matrix, Sw). Any robust estimator from CovarianceEstimation.jl can be used.
      • cov_b::StatsBase.SimpleCovariance(): The same as cov_w but for the between-class covariance (used in computing the between-class scatter matrix, Sb).
• outdim::Int=0: The output dimension, i.e., the dimension of the transformed space, automatically set to min(indim, nclasses-1) if equal to 0.
      • regcoef::Float64=1e-6: The regularization coefficient. A positive value regcoef*eigmax(Sw) where Sw is the within-class scatter matrix, is added to the diagonal of Sw to improve numerical stability. This can be useful if using the standard covariance estimator.
      • dist=Distances.SqEuclidean(): The distance metric to use when performing classification (to compare the distance between a new point and centroids in the transformed space); must be a subtype of Distances.SemiMetric from Distances.jl, e.g., Distances.CosineDist.

      Operations

      • transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.
      • predict(mach, Xnew): Return predictions of the target given features Xnew having the same scitype as X above. Predictions are probabilistic but uncalibrated.
      • predict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • classes: The classes seen during model fitting.
      • projection_matrix: The learned projection matrix, of size (indim, outdim), where indim and outdim are the input and output dimensions respectively (See Report section below).

      Report

      The fields of report(mach) are:

• indim: The dimension of the input space, i.e., the number of training features.
      • outdim: The dimension of the transformed space the model is projected to.
      • mean: The mean of the untransformed training data. A vector of length indim.
      • nclasses: The number of classes directly observed in the training data (which can be less than the total number of classes in the class pool).
      • class_means: The class-specific means of the training data. A matrix of size (indim, nclasses) with the ith column being the class-mean of the ith class in classes (See fitted params section above).
      • class_weights: The weights (class counts) of each class. A vector of length nclasses with the ith element being the class weight of the ith class in classes. (See fitted params section above.)
      • Sb: The between class scatter matrix.
      • Sw: The within class scatter matrix.

      Examples

      using MLJ
      +
      +LDA = @load LDA pkg=MultivariateStats
      +
      +X, y = @load_iris ## a table and a vector
      +
      +model = LDA()
      +mach = machine(model, X, y) |> fit!
      +
      +Xproj = transform(mach, X)
      +y_hat = predict(mach, X)
      +labels = predict_mode(mach, X)
      +

      See also BayesianLDA, SubspaceLDA, BayesianSubspaceLDA

      diff --git a/v0.20.3/models/LGBMClassifier_LightGBM/index.html b/v0.20.3/models/LGBMClassifier_LightGBM/index.html new file mode 100644 index 000000000..0a0de5c72 --- /dev/null +++ b/v0.20.3/models/LGBMClassifier_LightGBM/index.html @@ -0,0 +1,2 @@ + +LGBMClassifier · MLJ diff --git a/v0.20.3/models/LGBMRegressor_LightGBM/index.html b/v0.20.3/models/LGBMRegressor_LightGBM/index.html new file mode 100644 index 000000000..ef00fa735 --- /dev/null +++ b/v0.20.3/models/LGBMRegressor_LightGBM/index.html @@ -0,0 +1,2 @@ + +LGBMRegressor · MLJ diff --git a/v0.20.3/models/LMDDDetector_OutlierDetectionPython/index.html b/v0.20.3/models/LMDDDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..45f5e6476 --- /dev/null +++ b/v0.20.3/models/LMDDDetector_OutlierDetectionPython/index.html @@ -0,0 +1,4 @@ + +LMDDDetector · MLJ diff --git a/v0.20.3/models/LOCIDetector_OutlierDetectionPython/index.html b/v0.20.3/models/LOCIDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..47bc3261b --- /dev/null +++ b/v0.20.3/models/LOCIDetector_OutlierDetectionPython/index.html @@ -0,0 +1,3 @@ + +LOCIDetector · MLJ diff --git a/v0.20.3/models/LODADetector_OutlierDetectionPython/index.html b/v0.20.3/models/LODADetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..450907e0c --- /dev/null +++ b/v0.20.3/models/LODADetector_OutlierDetectionPython/index.html @@ -0,0 +1,3 @@ + +LODADetector · MLJ diff --git a/v0.20.3/models/LOFDetector_OutlierDetectionNeighbors/index.html b/v0.20.3/models/LOFDetector_OutlierDetectionNeighbors/index.html new file mode 100644 index 000000000..6b023d5da --- /dev/null +++ b/v0.20.3/models/LOFDetector_OutlierDetectionNeighbors/index.html @@ -0,0 +1,11 @@ + +LOFDetector · MLJ

      LOFDetector

      LOFDetector(k = 5,
      +            metric = Euclidean(),
      +            algorithm = :kdtree,
      +            leafsize = 10,
      +            reorder = true,
      +            parallel = false)

      Calculate an anomaly score based on the density of an instance in comparison to its neighbors. This algorithm introduced the notion of local outliers and was developed by Breunig et al., see [1].

      Parameters

      k::Integer

      Number of neighbors (must be greater than 0).

      metric::Metric

      This is one of the Metric types defined in the Distances.jl package. It is possible to define your own metrics by creating new types that are subtypes of Metric.

      algorithm::Symbol

      One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.

      static::Union{Bool, Symbol}

      One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.

      leafsize::Int

Determines at what number of points to stop splitting the tree further. There is a trade-off between traversing the tree and having to evaluate the metric function for an increasing number of points.

      reorder::Bool

While building the tree, points that are close in distance are placed close in memory, since this helps with cache locality. In this case, a copy of the original data is made so that the original data is left unmodified. This can have a significant impact on performance and is set to true by default.

      parallel::Bool

      Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. Note: fit is not parallel.

      Examples

      using OutlierDetection: LOFDetector, fit, transform
      +detector = LOFDetector()
      +X = rand(10, 100)
      +model, result = fit(detector, X; verbosity=0)
      +test_scores = transform(detector, model, X)

      References

      [1] Breunig, Markus M.; Kriegel, Hans-Peter; Ng, Raymond T.; Sander, Jörg (2000): LOF: Identifying Density-Based Local Outliers.

      diff --git a/v0.20.3/models/LOFDetector_OutlierDetectionPython/index.html b/v0.20.3/models/LOFDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..004b481ac --- /dev/null +++ b/v0.20.3/models/LOFDetector_OutlierDetectionPython/index.html @@ -0,0 +1,9 @@ + +LOFDetector · MLJ diff --git a/v0.20.3/models/LarsCVRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/LarsCVRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..018439067 --- /dev/null +++ b/v0.20.3/models/LarsCVRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LarsCVRegressor · MLJ

      LarsCVRegressor

      LarsCVRegressor

      A model type for constructing a least angle regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LarsCVRegressor = @load LarsCVRegressor pkg=MLJScikitLearnInterface

      Do model = LarsCVRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LarsCVRegressor(fit_intercept=...).

      Hyper-parameters

      • fit_intercept = true
      • verbose = false
      • max_iter = 500
      • normalize = false
      • precompute = auto
      • cv = 5
      • max_n_alphas = 1000
      • n_jobs = nothing
      • eps = 2.220446049250313e-16
      • copy_X = true
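
Example

A minimal usage sketch on synthetic data from make_regression (not from the upstream docstring), assuming the standard MLJ supervised workflow applies to this wrapper:

using MLJ
+LarsCVRegressor = @load LarsCVRegressor pkg=MLJScikitLearnInterface
+
+X, y = make_regression()
+mach = machine(LarsCVRegressor(cv=5), X, y) |> fit!
+yhat = predict(mach, X)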
      diff --git a/v0.20.3/models/LarsRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/LarsRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..74c53b019 --- /dev/null +++ b/v0.20.3/models/LarsRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LarsRegressor · MLJ

      LarsRegressor

      LarsRegressor

      A model type for constructing a least angle regressor (LARS), based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LarsRegressor = @load LarsRegressor pkg=MLJScikitLearnInterface

      Do model = LarsRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LarsRegressor(fit_intercept=...).

      Hyper-parameters

      • fit_intercept = true
      • verbose = false
      • normalize = false
      • precompute = auto
      • n_nonzero_coefs = 500
      • eps = 2.220446049250313e-16
      • copy_X = true
      • fit_path = true
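
Example

A minimal usage sketch on synthetic data (the n_nonzero_coefs value is illustrative only), assuming the standard MLJ supervised workflow applies to this wrapper:

using MLJ
+LarsRegressor = @load LarsRegressor pkg=MLJScikitLearnInterface
+
+X, y = make_regression()
+mach = machine(LarsRegressor(n_nonzero_coefs=10), X, y) |> fit!
+yhat = predict(mach, X)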
      diff --git a/v0.20.3/models/LassoCVRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/LassoCVRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..b01db62cd --- /dev/null +++ b/v0.20.3/models/LassoCVRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LassoCVRegressor · MLJ

      LassoCVRegressor

      LassoCVRegressor

      A model type for constructing a lasso regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LassoCVRegressor = @load LassoCVRegressor pkg=MLJScikitLearnInterface

      Do model = LassoCVRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LassoCVRegressor(eps=...).

      Hyper-parameters

      • eps = 0.001
      • n_alphas = 100
      • alphas = nothing
      • fit_intercept = true
      • precompute = auto
      • max_iter = 1000
      • tol = 0.0001
      • copy_X = true
      • cv = 5
      • verbose = false
      • n_jobs = nothing
      • positive = false
      • random_state = nothing
      • selection = cyclic
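
Example

A minimal usage sketch on synthetic data, assuming the standard MLJ supervised workflow applies to this wrapper; the cross-validated selection of the regularization strength happens inside fit!:

using MLJ
+LassoCVRegressor = @load LassoCVRegressor pkg=MLJScikitLearnInterface
+
+X, y = make_regression()
+mach = machine(LassoCVRegressor(n_alphas=100, cv=5), X, y) |> fit!
+yhat = predict(mach, X)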
      diff --git a/v0.20.3/models/LassoLarsCVRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/LassoLarsCVRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..718eefc74 --- /dev/null +++ b/v0.20.3/models/LassoLarsCVRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LassoLarsCVRegressor · MLJ

      LassoLarsCVRegressor

      LassoLarsCVRegressor

      A model type for constructing a Lasso model fit with least angle regression (LARS) with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LassoLarsCVRegressor = @load LassoLarsCVRegressor pkg=MLJScikitLearnInterface

      Do model = LassoLarsCVRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LassoLarsCVRegressor(fit_intercept=...).

      Hyper-parameters

      • fit_intercept = true
      • verbose = false
      • max_iter = 500
      • normalize = false
      • precompute = auto
      • cv = 5
      • max_n_alphas = 1000
      • n_jobs = nothing
      • eps = 2.220446049250313e-16
      • copy_X = true
      • positive = false
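
Example

A minimal usage sketch on synthetic data, assuming the standard MLJ supervised workflow applies to this wrapper:

using MLJ
+LassoLarsCVRegressor = @load LassoLarsCVRegressor pkg=MLJScikitLearnInterface
+
+X, y = make_regression()
+mach = machine(LassoLarsCVRegressor(cv=5), X, y) |> fit!
+yhat = predict(mach, X)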
      diff --git a/v0.20.3/models/LassoLarsICRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/LassoLarsICRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..df6ac0713 --- /dev/null +++ b/v0.20.3/models/LassoLarsICRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LassoLarsICRegressor · MLJ

      LassoLarsICRegressor

      LassoLarsICRegressor

      A model type for constructing a Lasso model with LARS using BIC or AIC for model selection, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LassoLarsICRegressor = @load LassoLarsICRegressor pkg=MLJScikitLearnInterface

      Do model = LassoLarsICRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LassoLarsICRegressor(criterion=...).

      Hyper-parameters

      • criterion = aic
      • fit_intercept = true
      • verbose = false
      • normalize = false
      • precompute = auto
      • max_iter = 500
      • eps = 2.220446049250313e-16
      • copy_X = true
      • positive = false
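
Example

A minimal usage sketch on synthetic data, using the default criterion and assuming the standard MLJ supervised workflow applies to this wrapper:

using MLJ
+LassoLarsICRegressor = @load LassoLarsICRegressor pkg=MLJScikitLearnInterface
+
+X, y = make_regression()
+mach = machine(LassoLarsICRegressor(), X, y) |> fit!
+yhat = predict(mach, X)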
      diff --git a/v0.20.3/models/LassoLarsRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/LassoLarsRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..63e2be2b8 --- /dev/null +++ b/v0.20.3/models/LassoLarsRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LassoLarsRegressor · MLJ

      LassoLarsRegressor

      LassoLarsRegressor

      A model type for constructing a Lasso model fit with least angle regression (LARS), based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LassoLarsRegressor = @load LassoLarsRegressor pkg=MLJScikitLearnInterface

      Do model = LassoLarsRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LassoLarsRegressor(alpha=...).

      Hyper-parameters

      • alpha = 1.0
      • fit_intercept = true
      • verbose = false
      • normalize = false
      • precompute = auto
      • max_iter = 500
      • eps = 2.220446049250313e-16
      • copy_X = true
      • fit_path = true
      • positive = false
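
Example

A minimal usage sketch on synthetic data (alpha left at its default value), assuming the standard MLJ supervised workflow applies to this wrapper:

using MLJ
+LassoLarsRegressor = @load LassoLarsRegressor pkg=MLJScikitLearnInterface
+
+X, y = make_regression()
+mach = machine(LassoLarsRegressor(alpha=1.0), X, y) |> fit!
+yhat = predict(mach, X)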
      diff --git a/v0.20.3/models/LassoRegressor_MLJLinearModels/index.html b/v0.20.3/models/LassoRegressor_MLJLinearModels/index.html new file mode 100644 index 000000000..d0b921b5d --- /dev/null +++ b/v0.20.3/models/LassoRegressor_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +LassoRegressor · MLJ

      LassoRegressor

      LassoRegressor

      A model type for constructing a lasso regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LassoRegressor = @load LassoRegressor pkg=MLJLinearModels

      Do model = LassoRegressor() to construct an instance with default hyper-parameters.

      Lasso regression is a linear model with objective function

$$|Xθ - y|₂²/2 + n⋅λ|θ|₁$$

      where $n$ is the number of observations.

      If scale_penalty_with_samples = false the objective function is

$$|Xθ - y|₂²/2 + λ|θ|₁$$

      Different solver options exist, as indicated under "Hyperparameters" below.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      • lambda::Real: strength of the L1 regularization. Default: 1.0
      • fit_intercept::Bool: whether to fit the intercept or not. Default: true
      • penalize_intercept::Bool: whether to penalize the intercept. Default: false
      • scale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true
      • solver::Union{Nothing, MLJLinearModels.Solver}: any instance of MLJLinearModels.ProxGrad. If solver=nothing (default) then ProxGrad(accel=true) (FISTA) is used. Solver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...). Default: nothing

      Example

      using MLJ
      +X, y = make_regression()
      +mach = fit!(machine(LassoRegressor(), X, y))
      +predict(mach, X)
      +fitted_params(mach)

      See also ElasticNetRegressor.

      diff --git a/v0.20.3/models/LassoRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/LassoRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..f8bd6ea6f --- /dev/null +++ b/v0.20.3/models/LassoRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LassoRegressor · MLJ

      LassoRegressor

      LassoRegressor

      A model type for constructing a lasso regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LassoRegressor = @load LassoRegressor pkg=MLJScikitLearnInterface

      Do model = LassoRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LassoRegressor(alpha=...).

      Hyper-parameters

      • alpha = 1.0
      • fit_intercept = true
      • precompute = false
      • copy_X = true
      • max_iter = 1000
      • tol = 0.0001
      • warm_start = false
      • positive = false
      • random_state = nothing
      • selection = cyclic
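
      Example

      A minimal sketch of the generic MLJ workflow for this model; the alpha override and the make_regression data are illustrative only.

      using MLJ
      +LassoRegressor = @load LassoRegressor pkg=MLJScikitLearnInterface
      +model = LassoRegressor(alpha=0.5) ## illustrative keyword override
      +X, y = make_regression(100, 3) ## synthetic feature table and Continuous target
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)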
      diff --git a/v0.20.3/models/LinearBinaryClassifier_GLM/index.html b/v0.20.3/models/LinearBinaryClassifier_GLM/index.html new file mode 100644 index 000000000..d1e919b59 --- /dev/null +++ b/v0.20.3/models/LinearBinaryClassifier_GLM/index.html @@ -0,0 +1,28 @@ + +LinearBinaryClassifier · MLJ

      LinearBinaryClassifier

      LinearBinaryClassifier

      A model type for constructing a linear binary classifier, based on GLM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LinearBinaryClassifier = @load LinearBinaryClassifier pkg=GLM

      Do model = LinearBinaryClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearBinaryClassifier(fit_intercept=...).

      LinearBinaryClassifier is a generalized linear model, specialised to the case of a binary target variable, with a user-specified link function. Options exist to specify an intercept or offset feature.

      Training data

      In MLJ or MLJBase, bind an instance model to data with one of:

      mach = machine(model, X, y)
      +mach = machine(model, X, y, w)

      Here

      • X: is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the scitype with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor(2) or <:Multiclass(2); check the scitype with schema(y)
      • w: is a vector of Real per-observation weights

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • fit_intercept=true: Whether to calculate the intercept for this model. If set to false, no intercept will be calculated (e.g. the data is expected to be centered)
      • link=GLM.LogitLink: The function which links the linear prediction function to the probability of a particular outcome or class. This must have type GLM.Link01. Options include GLM.LogitLink(), GLM.ProbitLink(), CloglogLink(), CauchitLink().
      • offsetcol=nothing: Name of the column to be used as an offset, if any. An offset is a variable which is known to have a coefficient of 1.
      • maxiter::Integer=30: The maximum number of iterations allowed to achieve convergence.
      • atol::Real=1e-6: Absolute threshold for convergence. Convergence is achieved when the relative change in deviance is less than max(rtol*dev, atol). This term exists to avoid failure when deviance is unchanged except for rounding errors.
      • rtol::Real=1e-6: Relative threshold for convergence. Convergence is achieved when the relative change in deviance is less than max(rtol*dev, atol). This term exists to avoid failure when deviance is unchanged except for rounding errors.
      • minstepfac::Real=0.001: Minimum step fraction. Must be between 0 and 1. Lower bound for the factor used to update the linear fit.
      • report_keys: Vector of keys for the report. Possible keys are: :deviance, :dof_residual, :stderror, :vcov, :coef_table and :glm_model. By default only :glm_model is excluded.

      Operations

      • predict(mach, Xnew): Return predictions of the target given features Xnew having the same scitype as X above. Predictions are probabilistic.
      • predict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features: The names of the features used during model fitting.
      • coef: The linear coefficients determined by the model.
      • intercept: The intercept determined by the model.

      Report

      The fields of report(mach) are:

      • deviance: Measure of deviance of fitted model with respect to a perfectly fitted model. For a linear model, this is the weighted residual sum of squares
      • dof_residual: The degrees of freedom for residuals, when meaningful.
      • stderror: The standard errors of the coefficients.
      • vcov: The estimated variance-covariance matrix of the coefficient estimates.
      • coef_table: Table which displays coefficients and summarizes their significance and confidence intervals.
      • glm_model: The raw fitted model returned by GLM.lm. Note this points to training data. Refer to the GLM.jl documentation for usage.

      Examples

      using MLJ
      +import GLM ## namespace must be available
      +
      +LinearBinaryClassifier = @load LinearBinaryClassifier pkg=GLM
      +clf = LinearBinaryClassifier(fit_intercept=false, link=GLM.ProbitLink())
      +
      +X, y = @load_crabs
      +
      +mach = machine(clf, X, y) |> fit!
      +
      +Xnew = (;FL = [8.1, 24.8, 7.2],
      +        RW = [5.1, 25.7, 6.4],
      +        CL = [15.9, 46.7, 14.3],
      +        CW = [18.7, 59.7, 12.2],
      +        BD = [6.2, 23.6, 8.4],)
      +
      +yhat = predict(mach, Xnew) ## probabilistic predictions
      +pdf(yhat, levels(y)) ## probability matrix
      +p_B = pdf.(yhat, "B")
      +class_labels = predict_mode(mach, Xnew)
      +
      +fitted_params(mach).features
      +fitted_params(mach).coef
      +fitted_params(mach).intercept
      +
      +report(mach)

      See also LinearRegressor, LinearCountRegressor

      diff --git a/v0.20.3/models/LinearCountRegressor_GLM/index.html b/v0.20.3/models/LinearCountRegressor_GLM/index.html new file mode 100644 index 000000000..cb498cb55 --- /dev/null +++ b/v0.20.3/models/LinearCountRegressor_GLM/index.html @@ -0,0 +1,34 @@ + +LinearCountRegressor · MLJ

      LinearCountRegressor

      LinearCountRegressor

      A model type for constructing a linear count regressor, based on GLM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LinearCountRegressor = @load LinearCountRegressor pkg=GLM

      Do model = LinearCountRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearCountRegressor(fit_intercept=...).

      LinearCountRegressor is a generalized linear model, specialised to the case of a Count target variable (non-negative, unbounded integer) with user-specified link function. Options exist to specify an intercept or offset feature.

      Training data

      In MLJ or MLJBase, bind an instance model to data with one of:

      mach = machine(model, X, y)
      +mach = machine(model, X, y, w)

      Here

      • X: is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the scitype with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is Count; check the scitype with schema(y)
      • w: is a vector of Real per-observation weights

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • fit_intercept=true: Whether to calculate the intercept for this model. If set to false, no intercept will be calculated (e.g. the data is expected to be centered)
      • distribution=Distributions.Poisson(): The distribution which the residuals/errors of the model should fit.
      • link=GLM.LogLink(): The function which links the linear prediction function to the probability of a particular outcome or class. This should be one of the following: GLM.IdentityLink(), GLM.InverseLink(), GLM.InverseSquareLink(), GLM.LogLink(), GLM.SqrtLink().
      • offsetcol=nothing: Name of the column to be used as an offset, if any. An offset is a variable which is known to have a coefficient of 1.
      • maxiter::Integer=30: The maximum number of iterations allowed to achieve convergence.
      • atol::Real=1e-6: Absolute threshold for convergence. Convergence is achieved when the relative change in deviance is less than max(rtol*dev, atol). This term exists to avoid failure when deviance is unchanged except for rounding errors.
      • rtol::Real=1e-6: Relative threshold for convergence. Convergence is achieved when the relative change in deviance is less than max(rtol*dev, atol). This term exists to avoid failure when deviance is unchanged except for rounding errors.
      • minstepfac::Real=0.001: Minimum step fraction. Must be between 0 and 1. Lower bound for the factor used to update the linear fit.
      • report_keys: Vector of keys for the report. Possible keys are: :deviance, :dof_residual, :stderror, :vcov, :coef_table and :glm_model. By default only :glm_model is excluded.

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew having the same Scitype as X above. Predictions are probabilistic.
      • predict_mean(mach, Xnew): instead return the mean of each prediction above
      • predict_median(mach, Xnew): instead return the median of each prediction above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features: The names of the features encountered during model fitting.
      • coef: The linear coefficients determined by the model.
      • intercept: The intercept determined by the model.

      Report

      The fields of report(mach) are:

      • deviance: Measure of deviance of fitted model with respect to a perfectly fitted model. For a linear model, this is the weighted residual sum of squares
      • dof_residual: The degrees of freedom for residuals, when meaningful.
      • stderror: The standard errors of the coefficients.
      • vcov: The estimated variance-covariance matrix of the coefficient estimates.
      • coef_table: Table which displays coefficients and summarizes their significance and confidence intervals.
      • glm_model: The raw fitted model returned by GLM.lm. Note this points to training data. Refer to the GLM.jl documentation for usage.

      Examples

      using MLJ
      +import MLJ.Distributions.Poisson
      +
      +## Generate some data whose target y looks Poisson when conditioned on
      +## X:
      +N = 10_000
      +w = [1.0, -2.0, 3.0]
      +mu(x) = exp(w'x) ## mean for a log link function
      +Xmat = rand(N, 3)
      +X = MLJ.table(Xmat)
      +y = map(1:N) do i
      +    x = Xmat[i, :]
      +    rand(Poisson(mu(x)))
      +end;
      +
      +CountRegressor = @load LinearCountRegressor pkg=GLM
      +model = CountRegressor(fit_intercept=false)
      +mach = machine(model, X, y)
      +fit!(mach)
      +
      +Xnew = MLJ.table(rand(3, 3))
      +yhat = predict(mach, Xnew)
      +yhat_point = predict_mean(mach, Xnew)
      +
      +## get coefficients approximating `w`:
      +julia> fitted_params(mach).coef
      +3-element Vector{Float64}:
      +  0.9969008753103842
      + -2.0255901752504775
      +  3.014407534033522
      +
      +report(mach)

      See also LinearRegressor, LinearBinaryClassifier

      diff --git a/v0.20.3/models/LinearRegressor_GLM/index.html b/v0.20.3/models/LinearRegressor_GLM/index.html new file mode 100644 index 000000000..a79f1ef71 --- /dev/null +++ b/v0.20.3/models/LinearRegressor_GLM/index.html @@ -0,0 +1,18 @@ + +LinearRegressor · MLJ

      LinearRegressor

      LinearRegressor

      A model type for constructing a linear regressor, based on GLM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LinearRegressor = @load LinearRegressor pkg=GLM

      Do model = LinearRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearRegressor(fit_intercept=...).

      LinearRegressor assumes the target is a continuous variable whose conditional distribution is normal with constant variance, and whose expected value is a linear combination of the features (identity link function). Options exist to specify an intercept or offset feature.

      Training data

      In MLJ or MLJBase, bind an instance model to data with one of:

      mach = machine(model, X, y)
      +mach = machine(model, X, y, w)

      Here

      • X: is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the scitype with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)
      • w: is a vector of Real per-observation weights

      Hyper-parameters

      • fit_intercept=true: Whether to calculate the intercept for this model. If set to false, no intercept will be calculated (e.g. the data is expected to be centered)
      • dropcollinear=false: Whether to drop features in the training data to ensure linear independence. If true, only the first of each set of linearly-dependent features is used. The coefficient for redundant linearly dependent features is 0.0 and all associated statistics are set to NaN.
      • offsetcol=nothing: Name of the column to be used as an offset, if any. An offset is a variable which is known to have a coefficient of 1.
      • report_keys: Vector of keys for the report. Possible keys are: :deviance, :dof_residual, :stderror, :vcov, :coef_table and :glm_model. By default only :glm_model is excluded.

      Train the machine using fit!(mach, rows=...).

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew having the same Scitype as X above. Predictions are probabilistic.
      • predict_mean(mach, Xnew): instead return the mean of each prediction above
      • predict_median(mach, Xnew): instead return the median of each prediction above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features: The names of the features encountered during model fitting.
      • coef: The linear coefficients determined by the model.
      • intercept: The intercept determined by the model.

      Report

      When all keys are enabled in report_keys, the following fields are available in report(mach):

      • deviance: Measure of deviance of fitted model with respect to a perfectly fitted model. For a linear model, this is the weighted residual sum of squares
      • dof_residual: The degrees of freedom for residuals, when meaningful.
      • stderror: The standard errors of the coefficients.
      • vcov: The estimated variance-covariance matrix of the coefficient estimates.
      • coef_table: Table which displays coefficients and summarizes their significance and confidence intervals.
      • glm_model: The raw fitted model returned by GLM.lm. Note this points to training data. Refer to the GLM.jl documentation for usage.

      Examples

      using MLJ
      +LinearRegressor = @load LinearRegressor pkg=GLM
      +glm = LinearRegressor()
      +
      +X, y = make_regression(100, 2) ## synthetic data
      +mach = machine(glm, X, y) |> fit!
      +
      +Xnew, _ = make_regression(3, 2)
      +yhat = predict(mach, Xnew) ## new predictions
      +yhat_point = predict_mean(mach, Xnew) ## new predictions
      +
      +fitted_params(mach).features
      +fitted_params(mach).coef ## x1, x2, intercept
      +fitted_params(mach).intercept
      +
      +report(mach)

      See also LinearCountRegressor, LinearBinaryClassifier

      diff --git a/v0.20.3/models/LinearRegressor_MLJLinearModels/index.html b/v0.20.3/models/LinearRegressor_MLJLinearModels/index.html new file mode 100644 index 000000000..832e243f4 --- /dev/null +++ b/v0.20.3/models/LinearRegressor_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +LinearRegressor · MLJ

      LinearRegressor

      LinearRegressor

      A model type for constructing a linear regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LinearRegressor = @load LinearRegressor pkg=MLJLinearModels

      Do model = LinearRegressor() to construct an instance with default hyper-parameters.

      This model provides standard linear regression with objective function

      $|Xθ - y|₂²/2$

      Different solver options exist, as indicated under "Hyperparameters" below.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      • fit_intercept::Bool: whether to fit the intercept or not. Default: true

      • solver::Union{Nothing, MLJLinearModels.Solver}: any instance of MLJLinearModels.Analytical. Use Analytical() for Cholesky and CG()=Analytical(iterative=true) for conjugate-gradient.

        If solver = nothing (default) then Analytical() is used. Default: nothing

      Example

      using MLJ
      +X, y = make_regression()
      +mach = fit!(machine(LinearRegressor(), X, y))
      +predict(mach, X)
      +fitted_params(mach)
      diff --git a/v0.20.3/models/LinearRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/LinearRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..3495c2a51 --- /dev/null +++ b/v0.20.3/models/LinearRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LinearRegressor · MLJ

      LinearRegressor

      LinearRegressor

      A model type for constructing an ordinary least-squares (OLS) regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LinearRegressor = @load LinearRegressor pkg=MLJScikitLearnInterface

      Do model = LinearRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearRegressor(fit_intercept=...).

      Hyper-parameters

      • fit_intercept = true
      • copy_X = true
      • n_jobs = nothing
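
      Example

      A minimal sketch of the usual MLJ workflow, mirroring the GLM and MultivariateStats examples on the neighbouring pages; the synthetic data is illustrative only.

      using MLJ
      +LinearRegressor = @load LinearRegressor pkg=MLJScikitLearnInterface
      +model = LinearRegressor(fit_intercept=true)
      +X, y = make_regression(100, 2) ## synthetic data
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)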
      diff --git a/v0.20.3/models/LinearRegressor_MultivariateStats/index.html b/v0.20.3/models/LinearRegressor_MultivariateStats/index.html new file mode 100644 index 000000000..afb062dbd --- /dev/null +++ b/v0.20.3/models/LinearRegressor_MultivariateStats/index.html @@ -0,0 +1,11 @@ + +LinearRegressor · MLJ

      LinearRegressor

      LinearRegressor

      A model type for constructing a linear regressor, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LinearRegressor = @load LinearRegressor pkg=MultivariateStats

      Do model = LinearRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearRegressor(bias=...).

      LinearRegressor assumes the target is a Continuous variable and trains a linear prediction function using the least squares algorithm. Options exist to specify a bias term.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the column scitypes with schema(X).
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • bias=true: Include the bias term if true, otherwise fit without bias term.

      Operations

      • predict(mach, Xnew): Return predictions of the target given new features Xnew, which should have the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • coefficients: The linear coefficients determined by the model.
      • intercept: The intercept determined by the model.

      Examples

      using MLJ
      +
      +LinearRegressor = @load LinearRegressor pkg=MultivariateStats
      +linear_regressor = LinearRegressor()
      +
      +X, y = make_regression(100, 2) ## a table and a vector (synthetic data)
      +mach = machine(linear_regressor, X, y) |> fit!
      +
      +Xnew, _ = make_regression(3, 2)
      +yhat = predict(mach, Xnew) ## new predictions

      See also MultitargetLinearRegressor, RidgeRegressor, MultitargetRidgeRegressor

      diff --git a/v0.20.3/models/LinearSVC_LIBSVM/index.html b/v0.20.3/models/LinearSVC_LIBSVM/index.html new file mode 100644 index 000000000..0270c36e3 --- /dev/null +++ b/v0.20.3/models/LinearSVC_LIBSVM/index.html @@ -0,0 +1,28 @@ + +LinearSVC · MLJ

      LinearSVC

      LinearSVC

      A model type for constructing a linear support vector classifier, based on LIBSVM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LinearSVC = @load LinearSVC pkg=LIBSVM

      Do model = LinearSVC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearSVC(solver=...).

      Reference for algorithm and core C-library: Rong-En Fan et al (2008): "LIBLINEAR: A Library for Large Linear Classification." Journal of Machine Learning Research 9 1871-1874. Available at https://www.csie.ntu.edu.tw/~cjlin/papers/liblinear.pdf.

      This model type is similar to SVC from the same package with the setting kernel=LIBSVM.Kernel.Linear, but is optimized for the linear case.

      Training data

      In MLJ or MLJBase, bind an instance model to data with one of:

      mach = machine(model, X, y)
      +mach = machine(model, X, y, w)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)
      • w: a dictionary of class weights, keyed on levels(y).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • solver=LIBSVM.Linearsolver.L2R_L2LOSS_SVC_DUAL: linear solver, which must be one of the following from the LIBSVM.jl package:

        • LIBSVM.Linearsolver.L2R_LR: L2-regularized logistic regression (primal)
        • LIBSVM.Linearsolver.L2R_L2LOSS_SVC_DUAL: L2-regularized L2-loss support vector classification (dual)
        • LIBSVM.Linearsolver.L2R_L2LOSS_SVC: L2-regularized L2-loss support vector classification (primal)
        • LIBSVM.Linearsolver.L2R_L1LOSS_SVC_DUAL: L2-regularized L1-loss support vector classification (dual)
        • LIBSVM.Linearsolver.MCSVM_CS: support vector classification by Crammer and Singer
        • LIBSVM.Linearsolver.L1R_L2LOSS_SVC: L1-regularized L2-loss support vector classification
        • LIBSVM.Linearsolver.L1R_LR: L1-regularized logistic regression
        • LIBSVM.Linearsolver.L2R_LR_DUAL: L2-regularized logistic regression (dual)
      • tolerance::Float64=Inf: tolerance for the stopping criterion.

      • cost=1.0 (range (0, Inf)): the parameter denoted $C$ in the cited reference; for greater regularization, decrease cost

      • bias= -1.0: if bias >= 0, instance x becomes [x; bias]; if bias < 0, no bias term added (default -1)

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • libsvm_model: the trained model object created by the LIBSVM.jl package
      • encoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation

      Examples

      using MLJ
      +import LIBSVM
      +
      +LinearSVC = @load LinearSVC pkg=LIBSVM               ## model type
      +model = LinearSVC(solver=LIBSVM.Linearsolver.L2R_LR) ## instance
      +
      +X, y = @load_iris ## table, vector
      +mach = machine(model, X, y) |> fit!
      +
      +Xnew = (sepal_length = [6.4, 7.2, 7.4],
      +        sepal_width = [2.8, 3.0, 2.8],
      +        petal_length = [5.6, 5.8, 6.1],
      +        petal_width = [2.1, 1.6, 1.9],)
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:
      + "virginica"
      + "versicolor"
      + "virginica"

      Incorporating class weights

      weights = Dict("virginica" => 1, "versicolor" => 20, "setosa" => 1)
      +mach = machine(model, X, y, weights) |> fit!
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:
      + "versicolor"
      + "versicolor"
      + "versicolor"

      See also the SVC and NuSVC classifiers, and LIBSVM.jl and the original C implementation documentation.

      diff --git a/v0.20.3/models/LogisticCVClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/LogisticCVClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..65c5b3618 --- /dev/null +++ b/v0.20.3/models/LogisticCVClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LogisticCVClassifier · MLJ

      LogisticCVClassifier

      LogisticCVClassifier

      A model type for constructing a logistic regression classifier with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LogisticCVClassifier = @load LogisticCVClassifier pkg=MLJScikitLearnInterface

      Do model = LogisticCVClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LogisticCVClassifier(Cs=...).

      Hyper-parameters

      • Cs = 10
      • fit_intercept = true
      • cv = 5
      • dual = false
      • penalty = l2
      • scoring = nothing
      • solver = lbfgs
      • tol = 0.0001
      • max_iter = 100
      • class_weight = nothing
      • n_jobs = nothing
      • verbose = 0
      • refit = true
      • intercept_scaling = 1.0
      • multi_class = auto
      • random_state = nothing
      • l1_ratios = nothing
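
      Example

      A minimal sketch of the usual MLJ classification workflow, assuming this wrapper follows the standard classifier interface; the Cs override and the make_blobs data are illustrative only.

      using MLJ
      +LogisticCVClassifier = @load LogisticCVClassifier pkg=MLJScikitLearnInterface
      +model = LogisticCVClassifier(Cs=5) ## illustrative keyword override
      +X, y = make_blobs(100, 3; centers=2) ## synthetic table and binary target
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)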
      diff --git a/v0.20.3/models/LogisticClassifier_MLJLinearModels/index.html b/v0.20.3/models/LogisticClassifier_MLJLinearModels/index.html new file mode 100644 index 000000000..cd7c85ae3 --- /dev/null +++ b/v0.20.3/models/LogisticClassifier_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +LogisticClassifier · MLJ

      LogisticClassifier

      LogisticClassifier

      A model type for constructing a logistic classifier, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LogisticClassifier = @load LogisticClassifier pkg=MLJLinearModels

      Do model = LogisticClassifier() to construct an instance with default hyper-parameters.

      This model is more commonly known as "logistic regression". It is a standard classifier for both binary and multiclass classification. The objective function applies either a logistic loss (binary target) or multinomial (softmax) loss, and has a mixed L1/L2 penalty:

      $L(y, Xθ) + n⋅λ|θ|₂²/2 + n⋅γ|θ|₁$.

      Here $L$ is either MLJLinearModels.LogisticLoss or MLJLinearModels.MultiClassLoss, $λ$ and $γ$ indicate the strength of the L2 (resp. L1) regularization components and $n$ is the number of training observations.

      With scale_penalty_with_samples = false the objective function is instead

      $L(y, Xθ) + λ|θ|₂²/2 + γ|θ|₁$.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      • lambda::Real: strength of the regularizer if penalty is :l2 or :l1 and strength of the L2 regularizer if penalty is :en. Default: eps()

      • gamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0

      • penalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2

      • fit_intercept::Bool: whether to fit the intercept or not. Default: true

      • penalize_intercept::Bool: whether to penalize the intercept. Default: false

      • scale_penalty_with_samples::Bool: whether to scale the penalty with the number of samples. Default: true

      • solver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, Newton, NewtonCG, ProxGrad; but subject to the following restrictions:

        • If penalty = :l2, ProxGrad is disallowed. Otherwise, ProxGrad is the only option.
        • Unless scitype(y) <: Finite{2} (binary target) Newton is disallowed.

        If solver = nothing (default) then ProxGrad(accel=true) (FISTA) is used, unless gamma = 0, in which case LBFGS() is used.

        Solver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) Default: nothing

      Example

      using MLJ
      +X, y = make_blobs(centers = 2)
      +mach = fit!(machine(LogisticClassifier(), X, y))
      +predict(mach, X)
      +fitted_params(mach)

      See also MultinomialClassifier.

      diff --git a/v0.20.3/models/LogisticClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/LogisticClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..6f071b71c --- /dev/null +++ b/v0.20.3/models/LogisticClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +LogisticClassifier · MLJ

      LogisticClassifier

      LogisticClassifier

      A model type for constructing a logistic regression classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      LogisticClassifier = @load LogisticClassifier pkg=MLJScikitLearnInterface

      Do model = LogisticClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LogisticClassifier(penalty=...).

      Hyper-parameters

      • penalty = l2
      • dual = false
      • tol = 0.0001
      • C = 1.0
      • fit_intercept = true
      • intercept_scaling = 1.0
      • class_weight = nothing
      • random_state = nothing
      • solver = lbfgs
      • max_iter = 100
      • multi_class = auto
      • verbose = 0
      • warm_start = false
      • n_jobs = nothing
      • l1_ratio = nothing
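
      Example

      A minimal sketch of the usual MLJ workflow for this classifier; the C override and the synthetic blobs data are illustrative only.

      using MLJ
      +LogisticClassifier = @load LogisticClassifier pkg=MLJScikitLearnInterface
      +model = LogisticClassifier(C=0.5) ## illustrative keyword override
      +X, y = make_blobs(100, 3; centers=2)
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)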
      diff --git a/v0.20.3/models/MCDDetector_OutlierDetectionPython/index.html b/v0.20.3/models/MCDDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..b90cff5cf --- /dev/null +++ b/v0.20.3/models/MCDDetector_OutlierDetectionPython/index.html @@ -0,0 +1,5 @@ + +MCDDetector · MLJ diff --git a/v0.20.3/models/MeanShift_MLJScikitLearnInterface/index.html b/v0.20.3/models/MeanShift_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..7fc81eb3a --- /dev/null +++ b/v0.20.3/models/MeanShift_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +MeanShift · MLJ

      MeanShift

      MeanShift

      A model type for constructing a mean shift clusterer, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MeanShift = @load MeanShift pkg=MLJScikitLearnInterface

      Do model = MeanShift() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MeanShift(bandwidth=...).

      Mean shift clustering using a flat kernel. Mean shift clustering aims to discover "blobs" in a smooth density of samples. It is a centroid-based algorithm, which works by updating candidates for centroids to be the mean of the points within a given region. These candidates are then filtered in a post-processing stage to eliminate near-duplicates to form the final set of centroids.
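
      Example

      The page stops at the description, so here is a minimal sketch of fitting this clusterer as an MLJ unsupervised model. It sticks to generic machine operations (fit!, fitted_params, report) and leaves how cluster assignments are exposed (e.g. via predict or transform) to the interface documentation, since that detail is not stated above.

      using MLJ
      +MeanShift = @load MeanShift pkg=MLJScikitLearnInterface
      +model = MeanShift() ## default hyper-parameters; e.g. MeanShift(bandwidth=...) to override
      +X = MLJ.table(randn(200, 2)) ## illustrative Continuous features
      +mach = machine(model, X) |> fit!
      +fitted_params(mach) ## learned parameters
      +report(mach) ## training report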

      diff --git a/v0.20.3/models/MiniBatchKMeans_MLJScikitLearnInterface/index.html b/v0.20.3/models/MiniBatchKMeans_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..0d42cde51 --- /dev/null +++ b/v0.20.3/models/MiniBatchKMeans_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +MiniBatchKMeans · MLJ

      MiniBatchKMeans

      MiniBatchKMeans

      A model type for constructing a mini-batch K-means clustering model, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MiniBatchKMeans = @load MiniBatchKMeans pkg=MLJScikitLearnInterface

      Do model = MiniBatchKMeans() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MiniBatchKMeans(n_clusters=...).

      Hyper-parameters

      • n_clusters = 8
      • max_iter = 100
      • batch_size = 100
      • verbose = 0
      • compute_labels = true
      • random_state = nothing
      • tol = 0.0
      • max_no_improvement = 10
      • init_size = nothing
      • n_init = 3
      • init = k-means++
      • reassignment_ratio = 0.01
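
      Example

      A minimal sketch of fitting this clusterer as an MLJ unsupervised model, with an illustrative n_clusters override; only the generic machine operations are assumed here, and cluster assignments for new data are typically obtained with predict or transform, depending on the interface.

      using MLJ
      +MiniBatchKMeans = @load MiniBatchKMeans pkg=MLJScikitLearnInterface
      +model = MiniBatchKMeans(n_clusters=3) ## illustrative keyword override
      +X = MLJ.table(randn(500, 4)) ## synthetic Continuous features
      +mach = machine(model, X) |> fit!
      +fitted_params(mach) ## learned parameters
      +report(mach) ## training report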
      diff --git a/v0.20.3/models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..b09dcbd7e --- /dev/null +++ b/v0.20.3/models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +MultiTaskElasticNetCVRegressor · MLJ

      MultiTaskElasticNetCVRegressor

      MultiTaskElasticNetCVRegressor

      A model type for constructing a multi-target elastic net regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultiTaskElasticNetCVRegressor = @load MultiTaskElasticNetCVRegressor pkg=MLJScikitLearnInterface

      Do model = MultiTaskElasticNetCVRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultiTaskElasticNetCVRegressor(l1_ratio=...).

      Hyper-parameters

      • l1_ratio = 0.5
      • eps = 0.001
      • n_alphas = 100
      • alphas = nothing
      • fit_intercept = true
      • max_iter = 1000
      • tol = 0.0001
      • cv = 5
      • copy_X = true
      • verbose = 0
      • n_jobs = nothing
      • random_state = nothing
      • selection = cyclic
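
      Example

      A minimal sketch of the usual MLJ workflow for a multi-target regressor; supplying the target as a table of Continuous columns, and the synthetic data, are illustrative assumptions rather than details taken from this page.

      using MLJ
      +MultiTaskElasticNetCVRegressor = @load MultiTaskElasticNetCVRegressor pkg=MLJScikitLearnInterface
      +model = MultiTaskElasticNetCVRegressor(l1_ratio=0.5)
      +X = MLJ.table(randn(100, 3)) ## feature table
      +y = MLJ.table(randn(100, 2)) ## two Continuous targets, as a table
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)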
      diff --git a/v0.20.3/models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..6ded220aa --- /dev/null +++ b/v0.20.3/models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +MultiTaskElasticNetRegressor · MLJ

      MultiTaskElasticNetRegressor

      MultiTaskElasticNetRegressor

      A model type for constructing a multi-target elastic net regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultiTaskElasticNetRegressor = @load MultiTaskElasticNetRegressor pkg=MLJScikitLearnInterface

      Do model = MultiTaskElasticNetRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultiTaskElasticNetRegressor(alpha=...).

      Hyper-parameters

      • alpha = 1.0
      • l1_ratio = 0.5
      • fit_intercept = true
      • copy_X = true
      • max_iter = 1000
      • tol = 0.0001
      • warm_start = false
      • random_state = nothing
      • selection = cyclic
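
      Example

      A minimal sketch of the usual MLJ workflow for a multi-target regressor; the alpha override, the table-valued target, and the synthetic data are illustrative only.

      using MLJ
      +MultiTaskElasticNetRegressor = @load MultiTaskElasticNetRegressor pkg=MLJScikitLearnInterface
      +model = MultiTaskElasticNetRegressor(alpha=0.5) ## illustrative keyword override
      +X = MLJ.table(randn(100, 3)) ## feature table
      +y = MLJ.table(randn(100, 2)) ## two Continuous targets, as a table
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)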
      diff --git a/v0.20.3/models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..21434d8c4 --- /dev/null +++ b/v0.20.3/models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +MultiTaskLassoCVRegressor · MLJ

      MultiTaskLassoCVRegressor

      MultiTaskLassoCVRegressor

      A model type for constructing a multi-target lasso regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultiTaskLassoCVRegressor = @load MultiTaskLassoCVRegressor pkg=MLJScikitLearnInterface

      Do model = MultiTaskLassoCVRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultiTaskLassoCVRegressor(eps=...).

      Hyper-parameters

      • eps = 0.001
      • n_alphas = 100
      • alphas = nothing
      • fit_intercept = true
      • max_iter = 300
      • tol = 0.0001
      • copy_X = true
      • cv = 5
      • verbose = false
      • n_jobs = 1
      • random_state = nothing
      • selection = cyclic
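
      Example

      A minimal sketch of the usual MLJ workflow for a multi-target regressor with built-in cross-validation; the cv override, the table-valued target, and the synthetic data are illustrative only.

      using MLJ
      +MultiTaskLassoCVRegressor = @load MultiTaskLassoCVRegressor pkg=MLJScikitLearnInterface
      +model = MultiTaskLassoCVRegressor(cv=3) ## illustrative keyword override
      +X = MLJ.table(randn(100, 3)) ## feature table
      +y = MLJ.table(randn(100, 2)) ## two Continuous targets, as a table
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)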
      diff --git a/v0.20.3/models/MultiTaskLassoRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/MultiTaskLassoRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..6ca9eb652 --- /dev/null +++ b/v0.20.3/models/MultiTaskLassoRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +MultiTaskLassoRegressor · MLJ

      MultiTaskLassoRegressor

      MultiTaskLassoRegressor

      A model type for constructing a multi-target lasso regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultiTaskLassoRegressor = @load MultiTaskLassoRegressor pkg=MLJScikitLearnInterface

      Do model = MultiTaskLassoRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultiTaskLassoRegressor(alpha=...).

      Hyper-parameters

      • alpha = 1.0
      • fit_intercept = true
      • max_iter = 1000
      • tol = 0.0001
      • copy_X = true
      • random_state = nothing
      • selection = cyclic
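
      Example

      A minimal sketch of the usual MLJ workflow for a multi-target regressor; the alpha override, the table-valued target, and the synthetic data are illustrative only.

      using MLJ
      +MultiTaskLassoRegressor = @load MultiTaskLassoRegressor pkg=MLJScikitLearnInterface
      +model = MultiTaskLassoRegressor(alpha=0.5) ## illustrative keyword override
      +X = MLJ.table(randn(100, 3)) ## feature table
      +y = MLJ.table(randn(100, 2)) ## two Continuous targets, as a table
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)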
      diff --git a/v0.20.3/models/MultinomialClassifier_MLJLinearModels/index.html b/v0.20.3/models/MultinomialClassifier_MLJLinearModels/index.html new file mode 100644 index 000000000..626af8910 --- /dev/null +++ b/v0.20.3/models/MultinomialClassifier_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +MultinomialClassifier · MLJ

      MultinomialClassifier

      MultinomialClassifier

      A model type for constructing a multinomial classifier, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultinomialClassifier = @load MultinomialClassifier pkg=MLJLinearModels

      Do model = MultinomialClassifier() to construct an instance with default hyper-parameters.

      This model coincides with LogisticClassifier, except certain optimizations possible in the special binary case will not be applied. Its hyperparameters are identical.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      • lambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. Default: eps()

      • gamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0

      • penalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2

      • fit_intercept::Bool: whether to fit the intercept or not. Default: true

      • penalize_intercept::Bool: whether to penalize the intercept. Default: false

      • scale_penalty_with_samples::Bool: whether to scale the penalty with the number of samples. Default: true

      • solver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, NewtonCG, ProxGrad; but subject to the following restrictions:

        • If penalty = :l2, ProxGrad is disallowed. Otherwise, ProxGrad is the only option.
        • Unless scitype(y) <: Finite{2} (binary target) Newton is disallowed.

        If solver = nothing (default) then ProxGrad(accel=true) (FISTA) is used, unless gamma = 0, in which case LBFGS() is used.

        Solver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) Default: nothing

      Example

      using MLJ
      +X, y = make_blobs(centers = 3)
      +mach = fit!(machine(MultinomialClassifier(), X, y))
      +predict(mach, X)
      +fitted_params(mach)

      See also LogisticClassifier.

      diff --git a/v0.20.3/models/MultinomialNBClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/MultinomialNBClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..fc68d7dc0 --- /dev/null +++ b/v0.20.3/models/MultinomialNBClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +MultinomialNBClassifier · MLJ

      MultinomialNBClassifier

      MultinomialNBClassifier

      A model type for constructing a multinomial naive Bayes classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultinomialNBClassifier = @load MultinomialNBClassifier pkg=MLJScikitLearnInterface

      Do model = MultinomialNBClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultinomialNBClassifier(alpha=...).

      Multinomial naive Bayes classifier. It is suitable for classification with discrete features (e.g. word counts for text classification).
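
      Example

      A minimal sketch of the usual MLJ workflow, using a small synthetic count-like matrix; the alpha value, the feature coercion to Continuous, and the target coercion to Multiclass are illustrative assumptions, not details taken from this page.

      using MLJ
      +MultinomialNBClassifier = @load MultinomialNBClassifier pkg=MLJScikitLearnInterface
      +model = MultinomialNBClassifier(alpha=1.0)
      +X = MLJ.table(float.(rand(0:5, 100, 10))) ## word-count-like features
      +y = coerce(rand(["neg", "pos"], 100), Multiclass) ## categorical target
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)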

      diff --git a/v0.20.3/models/MultinomialNBClassifier_NaiveBayes/index.html b/v0.20.3/models/MultinomialNBClassifier_NaiveBayes/index.html new file mode 100644 index 000000000..239338e8f --- /dev/null +++ b/v0.20.3/models/MultinomialNBClassifier_NaiveBayes/index.html @@ -0,0 +1,44 @@ + +MultinomialNBClassifier · MLJ

      MultinomialNBClassifier

      MultinomialNBClassifier

      A model type for constructing a multinomial naive Bayes classifier, based on NaiveBayes.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultinomialNBClassifier = @load MultinomialNBClassifier pkg=NaiveBayes

      Do model = MultinomialNBClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultinomialNBClassifier(alpha=...).

      The multinomial naive Bayes classifier is often applied when input features consist of counts (scitype Count) and when observations for a fixed target class are generated from a multinomial distribution with fixed probability vector, but whose sample length varies from observation to observation. For example, features might represent word counts in text documents being classified by sentiment.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Count; check the column scitypes with schema(X).
      • y is the target, which can be any AbstractVector whose element scitype is Finite; check the scitype with schema(y).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • alpha=1: Lidstone smoothing in estimation of multinomial probability vectors from training histograms (default corresponds to Laplacian smoothing).

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above.
      • predict_mode(mach, Xnew): Return the mode of above predictions.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • c_counts: A dictionary containing the observed count of each input class.
      • x_counts: A dictionary containing the categorical counts of each input class.
      • x_totals: The sum of each count (input feature), ungrouped.
      • n_obs: The total number of observations in the training data.

      Examples

      using MLJ
      +import TextAnalysis
      +
      +CountTransformer = @load CountTransformer pkg=MLJText
      +MultinomialNBClassifier = @load MultinomialNBClassifier pkg=NaiveBayes
      +
      +tokenized_docs = TextAnalysis.tokenize.([
      +    "I am very mad. You never listen.",
      +    "You seem to be having trouble? Can I help you?",
      +    "Our boss is mad at me. I hope he dies.",
      +    "His boss wants to help me. She is nice.",
      +    "Thank you for your help. It is nice working with you.",
      +    "Never do that again! I am so mad. ",
      +])
      +
      +sentiment = [
      +    "negative",
      +    "positive",
      +    "negative",
      +    "positive",
      +    "positive",
      +    "negative",
      +]
      +
      +mach1 = machine(CountTransformer(), tokenized_docs) |> fit!
      +
      +## matrix of counts:
      +X = transform(mach1, tokenized_docs)
      +
      +## to ensure scitype(y) <: AbstractVector{<:OrderedFactor}:
      +y = coerce(sentiment, OrderedFactor)
      +
      +classifier = MultinomialNBClassifier()
      +mach2 = machine(classifier, X, y)
      +fit!(mach2, rows=1:4)
      +
      +## probabilistic predictions:
      +y_prob = predict(mach2, rows=5:6) ## distributions
      +pdf.(y_prob, "positive") ## probabilities for "positive"
      +log_loss(y_prob, y[5:6])
      +
      +## point predictions:
      +yhat = mode.(y_prob) ## or `predict_mode(mach2, rows=5:6)`

      See also GaussianNBClassifier

      diff --git a/v0.20.3/models/MultitargetGaussianMixtureRegressor_BetaML/index.html b/v0.20.3/models/MultitargetGaussianMixtureRegressor_BetaML/index.html new file mode 100644 index 000000000..1864924fe --- /dev/null +++ b/v0.20.3/models/MultitargetGaussianMixtureRegressor_BetaML/index.html @@ -0,0 +1,35 @@ + +MultitargetGaussianMixtureRegressor · MLJ

      MultitargetGaussianMixtureRegressor

      mutable struct MultitargetGaussianMixtureRegressor <: MLJModelInterface.Deterministic

      A non-linear regressor derived from fitting the data on a probabilistic model (Gaussian Mixture Model). Relatively fast but generally not very precise, except for data with a structure matching the chosen underlying mixture.

      This is the multi-target version of the model. If you want to predict a single label (y), use the MLJ model GaussianMixtureRegressor.

      Hyperparameters:

      • n_classes::Int64: Number of mixtures (latent classes) to consider [def: 3]

      • initial_probmixtures::Vector{Float64}: Initial probabilities of the categorical distribution (n_classes x 1) [default: []]

      • mixtures::Union{Type, Vector{<:BetaML.GMM.AbstractMixture}}: An array (of length n_classes) of the mixtures to employ (see the GMM module). Each mixture object can be provided with or without its parameters (e.g. mean and variance for the Gaussian ones). Fully specified mixtures are useful only if the initialisation_strategy parameter is set to "given". This parameter can also be given simply as a type, in which case it is automatically extended to a vector of n_classes mixtures of the specified type. Note that mixing of different mixture types is not currently supported. [def: [DiagonalGaussian() for i in 1:n_classes]]

      • tol::Float64: Tolerance to stop the algorithm [default: 10^(-6)]

      • minimum_variance::Float64: Minimum variance for the mixtures [default: 0.05]

      • minimum_covariance::Float64: Minimum covariance for the mixtures with full covariance matrix [default: 0]. This should be set different than minimum_variance (see notes).

      • initialisation_strategy::String: The computation method of the vector of the initial mixtures. One of the following:

        • "grid": using a grid approach
        • "given": using the mixture provided in the fully qualified mixtures parameter
        • "kmeans": use first kmeans (itself initialised with a "grid" strategy) to set the initial mixture centers [default]

        Note that currently "random" and "shuffle" initialisations are not supported in gmm-based algorithms.

      • maximum_iterations::Int64: Maximum number of iterations [def: typemax(Int64), i.e. ∞]

      • rng::Random.AbstractRNG: Random Number Generator [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_boston;
      +
      +julia> ydouble     = hcat(y, y .*2  .+5);
      +
      +julia> modelType   = @load MultitargetGaussianMixtureRegressor pkg = "BetaML" verbosity=0
      +BetaML.GMM.MultitargetGaussianMixtureRegressor
      +
      +julia> model       = modelType()
      +MultitargetGaussianMixtureRegressor(
      +  n_classes = 3, 
      +  initial_probmixtures = Float64[], 
      +  mixtures = BetaML.GMM.DiagonalGaussian{Float64}[BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing)], 
      +  tol = 1.0e-6, 
      +  minimum_variance = 0.05, 
      +  minimum_covariance = 0.0, 
      +  initialisation_strategy = "kmeans", 
      +  maximum_iterations = 9223372036854775807, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, ydouble);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(MultitargetGaussianMixtureRegressor(n_classes = 3, …), …).
      +Iter. 1:        Var. of the post  20.46947926187522       Log-likelihood -23662.72770575145
      +
      +julia> ŷdouble    = predict(mach, X)
      +506×2 Matrix{Float64}:
      + 23.3358  51.6717
      + 23.3358  51.6717
      +  ⋮       
      + 16.6843  38.3686
      + 16.6843  38.3686
      diff --git a/v0.20.3/models/MultitargetKNNClassifier_NearestNeighborModels/index.html b/v0.20.3/models/MultitargetKNNClassifier_NearestNeighborModels/index.html new file mode 100644 index 000000000..4c93731ce --- /dev/null +++ b/v0.20.3/models/MultitargetKNNClassifier_NearestNeighborModels/index.html @@ -0,0 +1,31 @@ + +MultitargetKNNClassifier · MLJ

      MultitargetKNNClassifier

      MultitargetKNNClassifier

      A model type for constructing a multitarget K-nearest neighbor classifier, based on NearestNeighborModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultitargetKNNClassifier = @load MultitargetKNNClassifier pkg=NearestNeighborModels

      Do model = MultitargetKNNClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetKNNClassifier(K=...).

      Multi-target K-Nearest Neighbors Classifier (MultitargetKNNClassifier) is a variation of KNNClassifier that assumes the target variable is vector-valued with Multiclass or OrderedFactor components. (Target data must be presented as a table, however.)

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      OR

      mach = machine(model, X, y, w)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any table of responses whose element scitype is either <:Finite (<:Multiclass or <:OrderedFactor will do); check the column scitypes with schema(y). Each column of y is assumed to belong to a common categorical pool.
      • w is the observation weights, which can either be nothing (default) or an AbstractVector whose element scitype is Count or Continuous. This is different from the weights kernel, which is a model hyperparameter; see below.

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • K::Int=5 : number of neighbors
      • algorithm::Symbol = :kdtree : one of (:kdtree, :brutetree, :balltree)
      • metric::Metric = Euclidean() : any Metric from Distances.jl for the distance between points. For algorithm = :kdtree only metrics which are instances of Union{Distances.Chebyshev, Distances.Cityblock, Distances.Euclidean, Distances.Minkowski, Distances.WeightedCityblock, Distances.WeightedEuclidean, Distances.WeightedMinkowski} are supported.
      • leafsize::Int = 10 : determines the number of points at which to stop splitting the tree. This option is ignored and always taken as 0 for algorithm = :brutetree, since brutetree isn't actually a tree.
      • reorder::Bool = true : if true then points which are close in distance are placed close in memory. In this case, a copy of the original data will be made so that the original data is left unmodified. Setting this to true can significantly improve performance of the specified algorithm (except :brutetree). This option is ignored and always taken as false for algorithm = :brutetree.
      • weights::KNNKernel=Uniform() : kernel used in assigning weights to the k-nearest neighbors for each observation. An instance of one of the types in list_kernels(). User-defined weighting functions can be passed by wrapping the function in a UserDefinedKernel kernel (do ?NearestNeighborModels.UserDefinedKernel for more info). If observation weights w are passed during machine construction then the weight assigned to each neighbor vote is the product of the kernel generated weight for that neighbor and the corresponding observation weight.
      • output_type::Type{<:MultiUnivariateFinite}=DictTable : One of (ColumnTable, DictTable). The type of table type to use for predictions. Setting to ColumnTable might improve performance for narrow tables while setting to DictTable improves performance for wide tables.

      Operations

      • predict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above. Predictions are either a ColumnTable or DictTable of UnivariateFiniteVector columns depending on the value set for the output_type parameter discussed above. The probabilistic predictions are uncalibrated.
      • predict_mode(mach, Xnew): Return the modes of each column of the table of probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • tree: An instance of either KDTree, BruteTree or BallTree, depending on the value of the algorithm hyperparameter (see the hyper-parameters section above). These are data structures that store the training data so that nearest-neighbor searches on test data points are faster.

      Examples

      using MLJ, StableRNGs
      +
      +## set rng for reproducibility
      +rng = StableRNG(10)
      +
      +## Dataset generation
      +n, p = 10, 3
      +X = table(randn(rng, n, p)) ## feature table
      +fruit, color = categorical(["apple", "orange"]), categorical(["blue", "green"])
      +y = [(fruit = rand(rng, fruit), color = rand(rng, color)) for _ in 1:n] ## target_table
      +## Each column in y has a common categorical pool as expected
      +selectcols(y, :fruit) ## categorical array
      +selectcols(y, :color) ## categorical array
      +
      +## Load MultitargetKNNClassifier
      +MultitargetKNNClassifier = @load MultitargetKNNClassifier pkg=NearestNeighborModels
      +
      +## view possible kernels
      +NearestNeighborModels.list_kernels()
      +
      +## MultitargetKNNClassifier instantiation
      +model = MultitargetKNNClassifier(K=3, weights = NearestNeighborModels.Inverse())
      +
      +## wrap model and required data in an MLJ machine and fit
      +mach = machine(model, X, y) |> fit!
      +
      +## predict
      +y_hat = predict(mach, X)
      +labels = predict_mode(mach, X)
      +

      See also KNNClassifier

      diff --git a/v0.20.3/models/MultitargetKNNRegressor_NearestNeighborModels/index.html b/v0.20.3/models/MultitargetKNNRegressor_NearestNeighborModels/index.html new file mode 100644 index 000000000..00fc313d0 --- /dev/null +++ b/v0.20.3/models/MultitargetKNNRegressor_NearestNeighborModels/index.html @@ -0,0 +1,21 @@ + +MultitargetKNNRegressor · MLJ

      MultitargetKNNRegressor

      MultitargetKNNRegressor

      A model type for constructing a multitarget K-nearest neighbor regressor, based on NearestNeighborModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultitargetKNNRegressor = @load MultitargetKNNRegressor pkg=NearestNeighborModels

      Do model = MultitargetKNNRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetKNNRegressor(K=...).

      Multi-target K-Nearest Neighbors regressor (MultitargetKNNRegressor) is a variation of KNNRegressor that assumes the target variable is vector-valued with Continuous components. (Target data must be presented as a table, however.)

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      OR

      mach = machine(model, X, y, w)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any table of responses whose element scitype is Continuous; check column scitypes with schema(y).
      • w is the observation weights, which can be either nothing (default) or an AbstractVector whose element scitype is Count or Continuous. This is different from the weights kernel, which is a model hyper-parameter; see below.

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • K::Int=5 : number of neighbors
      • algorithm::Symbol = :kdtree : one of (:kdtree, :brutetree, :balltree)
      • metric::Metric = Euclidean() : any Metric from Distances.jl for the distance between points. For algorithm = :kdtree only metrics which are instances of Union{Distances.Chebyshev, Distances.Cityblock, Distances.Euclidean, Distances.Minkowski, Distances.WeightedCityblock, Distances.WeightedEuclidean, Distances.WeightedMinkowski} are supported.
      • leafsize::Int = 10 : determines the number of points at which to stop splitting the tree. This option is ignored and always taken as 0 for algorithm = :brutetree, since brutetree isn't actually a tree. (A sketch of overriding these tree-related options follows this list.)
      • reorder::Bool = true : if true then points which are close in distance are placed close in memory. In this case, a copy of the original data will be made so that the original data is left unmodified. Setting this to true can significantly improve performance of the specified algorithm (except :brutetree). This option is ignored and always taken as false for algorithm = :brutetree.
      • weights::KNNKernel=Uniform() : kernel used in assigning weights to the k-nearest neighbors for each observation. An instance of one of the types in list_kernels(). User-defined weighting functions can be passed by wrapping the function in a UserDefinedKernel kernel (do ?NearestNeighborModels.UserDefinedKernel for more info). If observation weights w are passed during machine construction then the weight assigned to each neighbor vote is the product of the kernel generated weight for that neighbor and the corresponding observation weight.
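
      As a minimal sketch (synthetic data; assumes Distances.jl is installed), the tree-related options above can be overridden at construction time:

      using MLJ
      import Distances
      MultitargetKNNRegressor = @load MultitargetKNNRegressor pkg=NearestNeighborModels
      model = MultitargetKNNRegressor(
          K = 7,
          algorithm = :balltree,
          metric = Distances.Cityblock(),   ## :balltree accepts any Distances.jl metric
          leafsize = 20,
      )
      X, y = make_regression(50, 3; n_targets=2)
      mach = machine(model, X, y) |> fit!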

      Operations

      • predict(mach, Xnew): Return predictions of the target given features Xnew, which should have the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • tree: An instance of either KDTree, BruteTree or BallTree, depending on the value of the algorithm hyperparameter (see the hyper-parameters section above). These are data structures that store the training data so that nearest-neighbor searches on test data points are faster.

      Examples

      using MLJ
      +
      +## Create Data
      +X, y = make_regression(10, 5, n_targets=2)
      +
      +## load MultitargetKNNRegressor
      +MultitargetKNNRegressor = @load MultitargetKNNRegressor pkg=NearestNeighborModels
      +
      +## view possible kernels
      +NearestNeighborModels.list_kernels()
      +
      +## MultitargetKNNRegressor instantiation
      +model = MultitargetKNNRegressor(weights = NearestNeighborModels.Inverse())
      +
      +## Wrap model and required data in an MLJ machine and fit.
      +mach = machine(model, X, y) |> fit! 
      +
      +## Predict
      +y_hat = predict(mach, X)
      +

      See also KNNRegressor

      diff --git a/v0.20.3/models/MultitargetLinearRegressor_MultivariateStats/index.html b/v0.20.3/models/MultitargetLinearRegressor_MultivariateStats/index.html new file mode 100644 index 000000000..89b5d32dc --- /dev/null +++ b/v0.20.3/models/MultitargetLinearRegressor_MultivariateStats/index.html @@ -0,0 +1,13 @@ + +MultitargetLinearRegressor · MLJ

      MultitargetLinearRegressor

      MultitargetLinearRegressor

      A model type for constructing a multitarget linear regressor, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultitargetLinearRegressor = @load MultitargetLinearRegressor pkg=MultivariateStats

      Do model = MultitargetLinearRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetLinearRegressor(bias=...).

      MultitargetLinearRegressor assumes the target variable is vector-valued with continuous components. It trains a linear prediction function using the least squares algorithm. Options exist to specify a bias term.
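
      As a rough illustration of what least-squares fitting computes (a sketch of the underlying mathematics only, not of the MultivariateStats.jl implementation), the coefficient matrix with a bias column appended is the solution of a linear least-squares problem:

      Xm = randn(100, 9); B_true = randn(9, 2)
      Ym = Xm * B_true .+ [0.5 -1.0]   ## two targets with different intercepts
      Xb = hcat(Xm, ones(100))         ## append a bias column
      B = Xb \ Ym                      ## least-squares solution, size (10, 2); last row recovers the intercepts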

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any table of responses whose element scitype is Continuous; check the scitype with scitype(y).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • bias=true: Include the bias term if true, otherwise fit without bias term.

      Operations

      • predict(mach, Xnew): Return predictions of the target given new features Xnew, which should have the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • coefficients: The linear coefficients determined by the model.
      • intercept: The intercept determined by the model.

      Examples

      using MLJ
      +using DataFrames
      +
      +LinearRegressor = @load MultitargetLinearRegressor pkg=MultivariateStats
      +linear_regressor = LinearRegressor()
      +
      +X, y = make_regression(100, 9; n_targets = 2) ## a table and a table (synthetic data)
      +
      +mach = machine(linear_regressor, X, y) |> fit!
      +
      +Xnew, _ = make_regression(3, 9)
      +yhat = predict(mach, Xnew) ## new predictions

      See also LinearRegressor, RidgeRegressor, MultitargetRidgeRegressor

      diff --git a/v0.20.3/models/MultitargetNeuralNetworkRegressor_BetaML/index.html b/v0.20.3/models/MultitargetNeuralNetworkRegressor_BetaML/index.html new file mode 100644 index 000000000..d98ce485b --- /dev/null +++ b/v0.20.3/models/MultitargetNeuralNetworkRegressor_BetaML/index.html @@ -0,0 +1,41 @@ + +MultitargetNeuralNetworkRegressor · MLJ

      MultitargetNeuralNetworkRegressor

      mutable struct MultitargetNeuralNetworkRegressor <: MLJModelInterface.Deterministic

      A simple but flexible Feedforward Neural Network, from the Beta Machine Learning Toolkit (BetaML) for regression of multiple dimensional targets.

      Parameters:

      • layers: Array of layer objects [def: nothing, i.e. basic network]. See subtypes(BetaML.AbstractLayer) for supported layers

      • loss: Loss (cost) function [def: BetaML.squared_cost]. Should always assume y and ŷ as matrices.

        Warning

        If you change the parameter loss, you need to either provide its derivative via the parameter dloss or use autodiff with dloss=nothing (see the sketch after this parameter list).

      • dloss: Derivative of the loss function [def: BetaML.dsquared_cost, i.e. use the derivative of the squared cost]. Use nothing for autodiff.

      • epochs: Number of epochs, i.e. passes through the whole training sample [def: 300]

      • batch_size: Size of each individual batch [def: 16]

      • opt_alg: The optimisation algorithm to update the gradient at each batch [def: BetaML.ADAM()]. See subtypes(BetaML.OptimisationAlgorithm) for supported optimizers

      • shuffle: Whether to randomly shuffle the data at each iteration (epoch) [def: true]

      • descr: An optional title and/or description for this model

      • cb: A call back function to provide information during training [def: BetaML.fitting_info]

      • rng: Random Number Generator (see FIXEDSEED) [default: Random.GLOBAL_RNG]
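
      As a minimal sketch of swapping in a custom loss with autodiff (mae_loss is a hypothetical name; its body is symmetric, so the argument order is immaterial):

      using MLJ
      modelType = @load MultitargetNeuralNetworkRegressor pkg = "BetaML" verbosity=0
      mae_loss(a, b) = sum(abs.(a .- b)) / size(a, 1)   ## mean absolute error over records; matrix inputs
      model = modelType(loss=mae_loss, dloss=nothing)   ## dloss=nothing requests autodiff for the custom loss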

      Notes:

      • data must be numerical
      • the label should be an n-records by n-dimensions matrix

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_boston;
      +
      +julia> ydouble     = hcat(y, y .*2  .+5);
      +
      +julia> modelType   = @load MultitargetNeuralNetworkRegressor pkg = "BetaML" verbosity=0
      +BetaML.Nn.MultitargetNeuralNetworkRegressor
      +
      +julia> layers                      = [BetaML.DenseLayer(12,50,f=BetaML.relu),BetaML.DenseLayer(50,50,f=BetaML.relu),BetaML.DenseLayer(50,50,f=BetaML.relu),BetaML.DenseLayer(50,2,f=BetaML.relu)];
      +
      +julia> model       = modelType(layers=layers,opt_alg=BetaML.ADAM(),epochs=500)
      +MultitargetNeuralNetworkRegressor(
      +  layers = BetaML.Nn.AbstractLayer[BetaML.Nn.DenseLayer([-0.2591582523441157 -0.027962845131416225 … 0.16044535560124418 -0.12838827994676857; -0.30381834909561184 0.2405495243851402 … -0.2588144861880588 0.09538577909777807; … ; -0.017320292924711156 -0.14042266424603767 … 0.06366999105841187 -0.13419651752478906; 0.07393079961409338 0.24521350531110264 … 0.04256867886217541 -0.0895506802948175], [0.14249427336553644, 0.24719379413682485, -0.25595911822556566, 0.10034088778965933, -0.017086404878505712, 0.21932184025609347, -0.031413516834861266, -0.12569076082247596, -0.18080140982481183, 0.14551901873323253  …  -0.13321995621967364, 0.2436582233332092, 0.0552222336976439, 0.07000814133633904, 0.2280064379660025, -0.28885681475734193, -0.07414214246290696, -0.06783184733650621, -0.055318068046308455, -0.2573488383282579], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.0395424111703751 -0.22531232360829911 … -0.04341228943744482 0.024336206858365517; -0.16481887432946268 0.17798073384748508 … -0.18594039305095766 0.051159225856547474; … ; -0.011639475293705043 -0.02347011206244673 … 0.20508869536159186 -0.1158382446274592; -0.19078069527757857 -0.007487540070740484 … -0.21341165344291158 -0.24158671316310726], [-0.04283623889330032, 0.14924461547060602, -0.17039563392959683, 0.00907774027816255, 0.21738885963113852, -0.06308040225941691, -0.14683286822101105, 0.21726892197970937, 0.19784321784707126, -0.0344988665714947  …  -0.23643089430602846, -0.013560425201427584, 0.05323948910726356, -0.04644175812567475, -0.2350400292671211, 0.09628312383424742, 0.07016420995205697, -0.23266392927140334, -0.18823664451487, 0.2304486691429084], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.11504184627266828 0.08601794194664503 … 0.03843129724045469 -0.18417305624127284; 0.10181551438831654 0.13459759904443674 … 0.11094951365942118 -0.1549466590355218; … ; 0.15279817525427697 0.0846661196058916 … -0.07993619892911122 0.07145402617285884; -0.1614160186346092 -0.13032002335149 … -0.12310552194729624 -0.15915773071049827], [-0.03435885900946367, -0.1198543931290306, 0.008454985905194445, -0.17980887188986966, -0.03557204910359624, 0.19125847393334877, -0.10949700778538696, -0.09343206702591, -0.12229583511781811, -0.09123969069220564  …  0.22119233518322862, 0.2053873143308657, 0.12756489387198222, 0.11567243705173319, -0.20982445664020496, 0.1595157838386987, -0.02087331046544119, -0.20556423263489765, -0.1622837764237961, -0.019220998739847395], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.25796717031347993 0.17579536633402948 … -0.09992960168785256 -0.09426177454620635; -0.026436330246675632 0.18070899284865127 … -0.19310119102392206 -0.06904005900252091], [0.16133004882307822, -0.3061228721091248], BetaML.Utils.relu, BetaML.Utils.drelu)], 
      +  loss = BetaML.Utils.squared_cost, 
      +  dloss = BetaML.Utils.dsquared_cost, 
      +  epochs = 500, 
      +  batch_size = 32, 
      +  opt_alg = BetaML.Nn.ADAM(BetaML.Nn.var"#90#93"(), 1.0, 0.9, 0.999, 1.0e-8, BetaML.Nn.Learnable[], BetaML.Nn.Learnable[]), 
      +  shuffle = true, 
      +  descr = "", 
      +  cb = BetaML.Nn.fitting_info, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, ydouble);
      +
      +julia> fit!(mach);
      +
      +julia> ŷdouble    = predict(mach, X);
      +
      +julia> hcat(ydouble,ŷdouble)
      +506×4 Matrix{Float64}:
      + 24.0  53.0  28.4624  62.8607
      + 21.6  48.2  22.665   49.7401
      + 34.7  74.4  31.5602  67.9433
      + 33.4  71.8  33.0869  72.4337
      +  ⋮                   
      + 23.9  52.8  23.3573  50.654
      + 22.0  49.0  22.1141  48.5926
      + 11.9  28.8  19.9639  45.5823
      diff --git a/v0.20.3/models/MultitargetNeuralNetworkRegressor_MLJFlux/index.html b/v0.20.3/models/MultitargetNeuralNetworkRegressor_MLJFlux/index.html new file mode 100644 index 000000000..695eac517 --- /dev/null +++ b/v0.20.3/models/MultitargetNeuralNetworkRegressor_MLJFlux/index.html @@ -0,0 +1,27 @@ + +MultitargetNeuralNetworkRegressor · MLJ

      MultitargetNeuralNetworkRegressor

      MultitargetNeuralNetworkRegressor

      A model type for constructing a multitarget neural network regressor, based on MLJFlux.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultitargetNeuralNetworkRegressor = @load MultitargetNeuralNetworkRegressor pkg=MLJFlux

      Do model = MultitargetNeuralNetworkRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetNeuralNetworkRegressor(builder=...).

      MultitargetNeuralNetworkRegressor is for training a data-dependent Flux.jl neural network to predict a multi-valued Continuous target, represented as a table, given a table of Continuous features. Users provide a recipe for constructing the network, based on properties of the data that is encountered, by specifying an appropriate builder. See MLJFlux documentation for more on builders.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is either a Matrix or any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). If X is a Matrix, it is assumed to have columns corresponding to features and rows corresponding to observations.
      • y is the target, which can be any table or matrix of output targets whose element scitype is Continuous; check column scitypes with schema(y). If y is a Matrix, it is assumed to have columns corresponding to variables and rows corresponding to observations.

      Hyper-parameters

      • builder=MLJFlux.Linear(σ=Flux.relu): An MLJFlux builder that constructs a neural network. Possible builders include: Linear, Short, and MLP. See MLJFlux documentation for more on builders, and the example below for using the @builder convenience macro.

      • optimiser::Flux.Adam(): A Flux.Optimise optimiser. The optimiser performs the updating of the weights of the network. For further reference, see the Flux optimiser documentation. To choose a learning rate (the update rate of the optimizer), a good rule of thumb is to start out at 10e-3, and tune using powers of 10 between 1 and 1e-7.

      • loss=Flux.mse: The loss function which the network will optimize. Should be a function which can be called in the form loss(yhat, y). Possible loss functions are listed in the Flux loss function documentation. For a regression task, natural loss functions are:

        • Flux.mse
        • Flux.mae
        • Flux.msle
        • Flux.huber_loss

        Currently MLJ measures are not supported as loss functions here.

      • epochs::Int=10: The duration of training, in epochs. Typically, one epoch represents one pass through the complete training dataset.

      • batch_size::Int=1: the batch size to be used for training, representing the number of samples per update of the network weights. Typically, batch size is between 8 and 512. Increasing batch size may accelerate training if acceleration=CUDALibs() and a GPU is available.

      • lambda::Float64=0: The strength of the weight regularization penalty. Can be any value in the range [0, ∞).

      • alpha::Float64=0: The L2/L1 mix of regularization, in the range [0, 1]. A value of 0 represents L2 regularization, and a value of 1 represents L1 regularization.

      • rng::Union{AbstractRNG, Int64}: The random number generator or seed used during training.

      • optimizer_changes_trigger_retraining::Bool=false: Defines what happens when re-fitting a machine if the associated optimiser has changed. If true, the associated machine will retrain from scratch on fit! call, otherwise it will not.

      • acceleration::AbstractResource=CPU1(): Defines on what hardware training is done. For Training on GPU, use CUDALibs().

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew having the same scitype as X above. Predictions are deterministic.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • chain: The trained "chain" (Flux.jl model), namely the series of layers, functions, and activations which make up the neural network.

      Report

      The fields of report(mach) are:

      • training_losses: A vector of training losses (penalised if lambda != 0) in historical order, of length epochs + 1. The first element is the pre-training loss.

      Examples

      In this example we apply a multi-target regression model to synthetic data:

      using MLJ
      +import MLJFlux
      +using Flux

      First, we generate some synthetic data (needs MLJBase 0.20.16 or higher):

      X, y = make_regression(100, 9; n_targets = 2) ## both tables
      +schema(y)
      +schema(X)

      Splitting off a test set:

      (X, Xtest), (y, ytest) = partition((X, y), 0.7, multi=true);

      Next, we can define a builder, making use of a convenience macro to do so. In the following @builder call, n_in is a proxy for the number of input features and n_out the number of target variables (both known at fit! time), while rng is a proxy for a RNG (which will be passed from the rng field of model defined below).

      builder = MLJFlux.@builder begin
      +    init=Flux.glorot_uniform(rng)
      +    Chain(
      +        Dense(n_in, 64, relu, init=init),
      +        Dense(64, 32, relu, init=init),
      +        Dense(32, n_out, init=init),
      +    )
      +end

      Instantiating the regression model:

      MultitargetNeuralNetworkRegressor = @load MultitargetNeuralNetworkRegressor
      +model = MultitargetNeuralNetworkRegressor(builder=builder, rng=123, epochs=20)

      We will arrange for standardization of the target by wrapping our model in TransformedTargetModel, and standardization of the features by inserting the wrapped model in a pipeline:

      pipe = Standardizer |> TransformedTargetModel(model, target=Standardizer)

      If we fit with a high verbosity (>1), we will see the losses during training. We can also see the losses in the output of report(mach):

      mach = machine(pipe, X, y)
      +fit!(mach, verbosity=2)
      +
      +## first element initial loss, 2:end per epoch training losses
      +report(mach).transformed_target_model_deterministic.model.training_losses

      For experimenting with learning rate, see the NeuralNetworkRegressor example.

      pipe.transformed_target_model_deterministic.model.optimiser.eta = 0.0001

      With the learning rate fixed, we can now compute a CV estimate of the performance (using all data bound to mach) and compare this with performance on the test set:

      ## custom MLJ loss:
      +multi_loss(yhat, y) = l2(MLJ.matrix(yhat), MLJ.matrix(y)) |> mean
      +
      +## CV estimate, based on `(X, y)`:
      +evaluate!(mach, resampling=CV(nfolds=5), measure=multi_loss)
      +
      +## loss for `(Xtest, ytest)`:
      +fit!(mach) ## trains on all data `(X, y)`
      +yhat = predict(mach, Xtest)
      +multi_loss(yhat, ytest)

      See also NeuralNetworkRegressor

      diff --git a/v0.20.3/models/MultitargetRidgeRegressor_MultivariateStats/index.html b/v0.20.3/models/MultitargetRidgeRegressor_MultivariateStats/index.html new file mode 100644 index 000000000..2ee441a04 --- /dev/null +++ b/v0.20.3/models/MultitargetRidgeRegressor_MultivariateStats/index.html @@ -0,0 +1,13 @@ + +MultitargetRidgeRegressor · MLJ

      MultitargetRidgeRegressor

      MultitargetRidgeRegressor

      A model type for constructing a multitarget ridge regressor, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultitargetRidgeRegressor = @load MultitargetRidgeRegressor pkg=MultivariateStats

      Do model = MultitargetRidgeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetRidgeRegressor(lambda=...).

      Multi-target ridge regression adds a quadratic penalty term to multi-target least squares regression, for regularization. Ridge regression is particularly useful in the case of multicollinearity. In this case, the output represents a response vector. Options exist to specify a bias term, and to adjust the strength of the penalty term.
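
      As a rough sketch of the penalty being described (the ridge normal equations under simplifying assumptions, ignoring how the bias term is handled; this is not the MultivariateStats.jl implementation):

      using LinearAlgebra
      X = randn(100, 6); B_true = randn(6, 2)
      Y = X * B_true .+ 0.1 .* randn(100, 2)
      λ = 1.5
      B = (X' * X + λ * I) \ (X' * Y)   ## ridge solution; λ = 0 recovers ordinary least squares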

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any table of responses whose element scitype is Continuous; check the scitype with scitype(y).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • lambda=1.0: The non-negative parameter for the regularization strength. If lambda is 0, ridge regression is equivalent to linear least squares regression, and as lambda approaches infinity, all the linear coefficients approach 0.
      • bias=true: Include the bias term if true, otherwise fit without bias term.

      Operations

      • predict(mach, Xnew): Return predictions of the target given new features Xnew, which should have the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • coefficients: The linear coefficients determined by the model.
      • intercept: The intercept determined by the model.

      Examples

      using MLJ
      +using DataFrames
      +
      +RidgeRegressor = @load MultitargetRidgeRegressor pkg=MultivariateStats
      +
      +X, y = make_regression(100, 6; n_targets = 2)  ## a table and a table (synthetic data)
      +
      +ridge_regressor = RidgeRegressor(lambda=1.5)
      +mach = machine(ridge_regressor, X, y) |> fit!
      +
      +Xnew, _ = make_regression(3, 6)
      +yhat = predict(mach, Xnew) ## new predictions

      See also LinearRegressor, MultitargetLinearRegressor, RidgeRegressor

      diff --git a/v0.20.3/models/MultitargetSRRegressor_SymbolicRegression/index.html b/v0.20.3/models/MultitargetSRRegressor_SymbolicRegression/index.html new file mode 100644 index 000000000..1c42cd635 --- /dev/null +++ b/v0.20.3/models/MultitargetSRRegressor_SymbolicRegression/index.html @@ -0,0 +1,20 @@ + +MultitargetSRRegressor · MLJ

      MultitargetSRRegressor

      MultitargetSRRegressor

      A model type for constructing a Multi-Target Symbolic Regression via Evolutionary Search, based on SymbolicRegression.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      MultitargetSRRegressor = @load MultitargetSRRegressor pkg=SymbolicRegression

      Do model = MultitargetSRRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetSRRegressor(binary_operators=...).

      Multi-target Symbolic Regression regressor (MultitargetSRRegressor) conducts several searches for expressions that predict each target variable from a set of input variables. All data is assumed to be Continuous. The search is performed using an evolutionary algorithm. This algorithm is described in the paper https://arxiv.org/abs/2305.01582.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      OR

      mach = machine(model, X, y, w)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). Variable names in discovered expressions will be taken from the column names of X, if available. Units in columns of X (use DynamicQuantities for units) will trigger dimensional analysis to be used.

      • y is the target, which can be any table of target variables whose element scitype is Continuous; check the scitype with schema(y). Units in columns of y (use DynamicQuantities for units) will trigger dimensional analysis to be used.
      • w is the observation weights which can either be nothing (default) or an AbstractVector whose element scitype is Count or Continuous. The same weights are used for all targets.

      Train the machine using fit!(mach), inspect the discovered expressions with report(mach), and predict on new data with predict(mach, Xnew). Note that, unlike other regressors, symbolic regression stores a list of lists of trained models. The model chosen from each of these lists is determined by the selection_method keyword argument (a function), which by default balances accuracy and complexity, as sketched below.
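
      For example (a minimal sketch; Base.Returns requires Julia 1.7 or later), the selection can be pinned to a fixed index on each Pareto frontier:

      using MLJ
      MultitargetSRRegressor = @load MultitargetSRRegressor pkg=SymbolicRegression
      ## always use the expression at index 5 of each Pareto frontier for prediction:
      model = MultitargetSRRegressor(binary_operators=[+, -, *], selection_method=Returns(5))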

      Hyper-parameters

      • binary_operators: Vector of binary operators (functions) to use. Each operator should be defined for two input scalars, and one output scalar. All operators need to be defined over the entire real line (excluding infinity - these are stopped before they are input), or return NaN where not defined. For speed, define it so it takes two reals of the same type as input, and outputs the same type. For the SymbolicUtils simplification backend, you will need to define a generic method of the operator so it takes arbitrary types.

      • unary_operators: Same, but for unary operators (one input scalar, gives an output scalar).

      • constraints: Array of pairs specifying size constraints for each operator. The constraints for a binary operator should be a 2-tuple (e.g., (-1, -1)) and the constraints for a unary operator should be an Int. A size constraint is a limit to the size of the subtree in each argument of an operator. e.g., [(^)=>(-1, 3)] means that the ^ operator can have arbitrary size (-1) in its left argument, but a maximum size of 3 in its right argument. Default is no constraints.

      • batching: Whether to evolve based on small mini-batches of data, rather than the entire dataset.

      • batch_size: What batch size to use if using batching.

      • elementwise_loss: What elementwise loss function to use. Can be one of the following losses, or any other loss of type SupervisedLoss. You can also pass a function that takes a scalar target (left argument), and scalar predicted (right argument), and returns a scalar. This will be averaged over the predicted data. If weights are supplied, your function should take a third argument for the weight scalar. Included losses: Regression: - LPDistLoss{P}(), - L1DistLoss(), - L2DistLoss() (mean square), - LogitDistLoss(), - HuberLoss(d), - L1EpsilonInsLoss(ϵ), - L2EpsilonInsLoss(ϵ), - PeriodicLoss(c), - QuantileLoss(τ), Classification: - ZeroOneLoss(), - PerceptronLoss(), - L1HingeLoss(), - SmoothedL1HingeLoss(γ), - ModifiedHuberLoss(), - L2MarginLoss(), - ExpLoss(), - SigmoidLoss(), - DWDMarginLoss(q).

      • loss_function: Alternatively, you may redefine the loss used as any function of tree::Node{T}, dataset::Dataset{T}, and options::Options, so long as you output a non-negative scalar of type T. This is useful if you want to use a loss that takes into account derivatives, or correlations across the dataset. This also means you could use a custom evaluation for a particular expression. If you are using batching=true, then your function should accept a fourth argument idx, which is either nothing (indicating that the full dataset should be used), or a vector of indices to use for the batch. For example,

          function my_loss(tree, dataset::Dataset{T,L}, options)::L where {T,L}
        +      prediction, flag = eval_tree_array(tree, dataset.X, options)
        +      if !flag
        +          return L(Inf)
        +      end
        +      return sum((prediction .- dataset.y) .^ 2) / dataset.n
        +  end
      • populations: How many populations of equations to use.

      • population_size: How many equations in each population.

      • ncycles_per_iteration: How many generations to consider per iteration.

      • tournament_selection_n: Number of expressions considered in each tournament.

      • tournament_selection_p: The fittest expression in a tournament is to be selected with probability p, the next fittest with probability p*(1-p), and so forth.

      • topn: Number of equations to return to the host process, and to consider for the hall of fame.

      • complexity_of_operators: What complexity should be assigned to each operator, and the occurrence of a constant or variable. By default, this is 1 for all operators. Can be a real number as well, in which case the complexity of an expression will be rounded to the nearest integer. Input this in the form of, e.g., [(^) => 3, sin => 2].

      • complexity_of_constants: What complexity should be assigned to use of a constant. By default, this is 1.

      • complexity_of_variables: What complexity should be assigned to each variable. By default, this is 1.

      • alpha: The probability of accepting an equation mutation during regularized evolution is given by exp(-delta_loss/(alpha * T)), where T goes from 1 to 0. Thus, alpha=infinity is the same as no annealing.

      • maxsize: Maximum size of equations during the search.

      • maxdepth: Maximum depth of equations during the search, by default this is set equal to the maxsize.

      • parsimony: A multiplicative factor for how much complexity is punished.

      • dimensional_constraint_penalty: An additive factor if the dimensional constraint is violated.

      • use_frequency: Whether to use a parsimony that adapts to the relative proportion of equations at each complexity; this will ensure that there are a balanced number of equations considered for every complexity.

      • use_frequency_in_tournament: Whether to use the adaptive parsimony described above inside the score, rather than just at the mutation accept/reject stage.

      • adaptive_parsimony_scaling: How much to scale the adaptive parsimony term in the loss. Increase this if the search is spending too much time optimizing the most complex equations.

      • turbo: Whether to use LoopVectorization.@turbo to evaluate expressions. This can be significantly faster, but is only compatible with certain operators. Experimental!

      • migration: Whether to migrate equations between processes.

      • hof_migration: Whether to migrate equations from the hall of fame to processes.

      • fraction_replaced: What fraction of each population to replace with migrated equations at the end of each cycle.

      • fraction_replaced_hof: What fraction to replace with hall of fame equations at the end of each cycle.

      • should_simplify: Whether to simplify equations. If you pass a custom objective, this will be set to false.

      • should_optimize_constants: Whether to use an optimization algorithm to periodically optimize constants in equations.

      • optimizer_nrestarts: How many different random starting positions to consider for optimization of constants.

      • optimizer_algorithm: Select algorithm to use for optimizing constants. Default is "BFGS", but "NelderMead" is also supported.

      • optimizer_options: General options for the constant optimization. For details we refer to the documentation on Optim.Options from the Optim.jl package. Options can be provided here as NamedTuple, e.g. (iterations=16,), as a Dict, e.g. Dict(:x_tol => 1.0e-32,), or as an Optim.Options instance.

      • output_file: What file to store equations to, as a backup.

      • perturbation_factor: When mutating a constant, either multiply or divide by (1+perturbation_factor)^(rand()+1).

      • probability_negate_constant: Probability of negating a constant in the equation when mutating it.

      • mutation_weights: Relative probabilities of the mutations. The struct MutationWeights should be passed to these options. See its documentation on MutationWeights for the different weights.

      • crossover_probability: Probability of performing crossover.

      • annealing: Whether to use simulated annealing.

      • warmup_maxsize_by: Whether to slowly increase the max size from 5 up to maxsize. If nonzero, specifies the fraction through the search at which the maxsize should be reached.

      • verbosity: Whether to print debugging statements or not.

      • print_precision: How many digits to print when printing equations. By default, this is 5.

      • save_to_file: Whether to save equations to a file during the search.

      • bin_constraints: See constraints. This is the same, but specified for binary operators only (for example, if you have an operator that is both a binary and unary operator).

      • una_constraints: Likewise, for unary operators.

      • seed: What random seed to use. nothing uses no seed.

      • progress: Whether to use a progress bar output (verbosity will have no effect).

      • early_stop_condition: Float - whether to stop early if the mean loss gets below this value. Function - a function taking (loss, complexity) as arguments and returning true or false.

      • timeout_in_seconds: Float64 - the time in seconds after which to exit (as an alternative to the number of iterations).

      • max_evals: Int (or Nothing) - the maximum number of evaluations of expressions to perform.

      • skip_mutation_failures: Whether to simply skip over mutations that fail or are rejected, rather than to replace the mutated expression with the original expression and proceed normally.

      • enable_autodiff: Whether to enable automatic differentiation functionality. This is turned off by default. If turned on, this will be turned off if one of the operators does not have well-defined gradients.

      • nested_constraints: Specifies how many times a combination of operators can be nested. For example, [sin => [cos => 0], cos => [cos => 2]] specifies that cos may never appear within a sin, but sin can be nested with itself an unlimited number of times. The second term specifies that cos can be nested up to 2 times within a cos, so that cos(cos(cos(x))) is allowed (as well as any combination of + or - within it), but cos(cos(cos(cos(x)))) is not allowed. When an operator is not specified, it is assumed that it can be nested an unlimited number of times. This requires that there is no operator which is used both in the unary operators and the binary operators (e.g., - could be both subtract, and negation). For binary operators, both arguments are treated the same way, and the max of each argument is constrained.

      • deterministic: Use a global counter for the birth time, rather than calls to time(). This gives perfect resolution, and is therefore deterministic. However, it is not thread safe, and must be used in serial mode.

      • define_helper_functions: Whether to define helper functions for constructing and evaluating trees.

      • niterations::Int=10: The number of iterations to perform the search. More iterations will improve the results.

      • parallelism=:multithreading: What parallelism mode to use. The options are :multithreading, :multiprocessing, and :serial. By default, multithreading will be used. Multithreading uses less memory, but multiprocessing can handle multi-node compute. If using :multithreading mode, the number of threads available to Julia is used. If using :multiprocessing, numprocs processes will be created dynamically if procs is unset. If you have already allocated processes, pass them to the procs argument and they will be used. You may also pass a string instead of a symbol, like "multithreading".

      • numprocs::Union{Int, Nothing}=nothing: The number of processes to use, if you want equation_search to set this up automatically. By default this will be 4, but can be any number (you should pick a number <= the number of cores available).

      • procs::Union{Vector{Int}, Nothing}=nothing: If you have set up a distributed run manually with procs = addprocs() and @everywhere, pass the procs to this keyword argument.

      • addprocs_function::Union{Function, Nothing}=nothing: If using multiprocessing (parallelism=:multiprocessing) and not passing procs manually, then processes will be allocated dynamically using addprocs. However, you may also pass a custom function to use instead of addprocs. This function should take a single positional argument, which is the number of processes to use, as well as the lazy keyword argument. For example, if set up on a slurm cluster, you could pass addprocs_function = addprocs_slurm, which will set up slurm processes.

      • heap_size_hint_in_bytes::Union{Int,Nothing}=nothing: On Julia 1.9+, you may set the --heap-size-hint flag on Julia processes, recommending garbage collection once a process is close to the recommended size. This is important for long-running distributed jobs where each process has an independent memory, and can help avoid out-of-memory errors. By default, this is set to Sys.free_memory() / numprocs.

      • runtests::Bool=true: Whether to run (quick) tests before starting the search, to see if there will be any problems during the equation search related to the host environment.

      • loss_type::Type=Nothing: If you would like to use a different type for the loss than for the data you passed, specify the type here. Note that if you pass complex data ::Complex{L}, then the loss type will automatically be set to L.

      • selection_method::Function: Function to select an expression from the Pareto frontier for use in predict. See SymbolicRegression.MLJInterfaceModule.choose_best for an example. This function should return a single integer specifying the index of the expression to use. By default, choose_best maximizes the score (a pound-for-pound rating) of expressions reaching the threshold of 1.5x the minimum loss. To fix the index at 5, you could just write Returns(5).

      • dimensions_type::AbstractDimensions: The type of dimensions to use when storing the units of the data. By default this is DynamicQuantities.SymbolicDimensions.

      Operations

      • predict(mach, Xnew): Return predictions of the target given features Xnew, which should have the same scitype as X above. The expression used for prediction is defined by the selection_method function, which can be seen by viewing report(mach).best_idx.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • best_idx::Vector{Int}: The index of the best expression in each Pareto frontier, as determined by the selection_method function.
      • equations::Vector{Vector{Node{T}}}: The expressions discovered by the search, represented in a dominating Pareto frontier (i.e., the best expressions found for each complexity). The outer vector is indexed by target variable, and the inner vector is ordered by increasing complexity. T is equal to the element type of the passed data.
      • equation_strings::Vector{Vector{String}}: The expressions discovered by the search, represented as strings for easy inspection.

      Report

      The fields of report(mach) are:

      • best_idx::Vector{Int}: The index of the best expression in each Pareto frontier, as determined by the selection_method function.
      • equations::Vector{Vector{Node{T}}}: The expressions discovered by the search, represented in a dominating Pareto frontier (i.e., the best expressions found for each complexity). The outer vector is indexed by target variable, and the inner vector is ordered by increasing complexity.
      • equation_strings::Vector{Vector{String}}: The expressions discovered by the search, represented as strings for easy inspection.
      • complexities::Vector{Vector{Int}}: The complexity of each expression in each Pareto frontier.
      • losses::Vector{Vector{L}}: The loss of each expression in each Pareto frontier, according to the loss function specified in the model. The type L is the loss type, which is usually the same as the element type of data passed (i.e., T), but can differ if complex data types are passed.
      • scores::Vector{Vector{L}}: A metric which considers both the complexity and loss of an expression, equal to the change in the log-loss divided by the change in complexity, relative to the previous expression along the Pareto frontier. A larger score aims to indicate an expression is more likely to be the true expression generating the data, but this is very problem-dependent and generally several other factors should be considered.

      Examples

      using MLJ
      +MultitargetSRRegressor = @load MultitargetSRRegressor pkg=SymbolicRegression
      +X = (a=rand(100), b=rand(100), c=rand(100))
      +Y = (y1=(@. cos(X.c) * 2.1 - 0.9), y2=(@. X.a * X.b + X.c))
      +model = MultitargetSRRegressor(binary_operators=[+, -, *], unary_operators=[exp], niterations=100)
      +mach = machine(model, X, Y)
      +fit!(mach)
      +y_hat = predict(mach, X)
      +## View the equations used:
      +r = report(mach)
      +for (output_index, (eq, i)) in enumerate(zip(r.equation_strings, r.best_idx))
      +    println("Equation used for ", output_index, ": ", eq[i])
      +end

      See also SRRegressor.

      diff --git a/v0.20.3/models/NeuralNetworkClassifier_BetaML/index.html b/v0.20.3/models/NeuralNetworkClassifier_BetaML/index.html new file mode 100644 index 000000000..2089ec640 --- /dev/null +++ b/v0.20.3/models/NeuralNetworkClassifier_BetaML/index.html @@ -0,0 +1,37 @@ + +NeuralNetworkClassifier · MLJ

      NeuralNetworkClassifier

      mutable struct NeuralNetworkClassifier <: MLJModelInterface.Probabilistic

      A simple but flexible Feedforward Neural Network, from the Beta Machine Learning Toolkit (BetaML) for classification problems.

      Parameters:

      • layers: Array of layer objects [def: nothing, i.e. basic network]. See subtypes(BetaML.AbstractLayer) for supported layers. The last "softmax" layer is automatically added.

      • loss: Loss (cost) function [def: BetaML.crossentropy]. Should always assume y and ŷ as matrices.

        Warning

        If you change the parameter loss, you need to either provide its derivative via the parameter dloss or use autodiff with dloss=nothing.

      • dloss: Derivative of the loss function [def: BetaML.dcrossentropy, i.e. the derivative of the cross-entropy]. Use nothing for autodiff.

      • epochs: Number of epochs, i.e. passes through the whole training sample [def: 200]

      • batch_size: Size of each individual batch [def: 16]

      • opt_alg: The optimisation algorithm to update the gradient at each batch [def: BetaML.ADAM()]. See subtypes(BetaML.OptimisationAlgorithm) for supported optimizers

      • shuffle: Whether to randomly shuffle the data at each iteration (epoch) [def: true]

      • descr: An optional title and/or description for this model

      • cb: A call back function to provide information during training [def: BetaML.fitting_info]

      • categories: The categories to represent as columns. [def: nothing, i.e. unique training values].

      • handle_unknown: How to handle categories not seen in training or not present in the provided categories array. "error" (default) raises an error, "infrequent" adds a specific column for these categories (see the sketch after this parameter list).

      • other_categories_name: Which value to assign during prediction to this "other" category (i.e. categories not seen in training or not present in the provided categories array) [def: nothing, i.e. typemax(Int64) for integer vectors and "other" for other types]. This setting is active only if handle_unknown="infrequent", in which case it MUST be specified if Y is neither integer nor string.

      • rng: Random Number Generator [default: Random.GLOBAL_RNG]
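
      As a minimal, hypothetical sketch of tolerating labels at prediction time that were not seen during training:

      using MLJ
      modelType = @load NeuralNetworkClassifier pkg = "BetaML" verbosity=0
      ## route unseen categories to a dedicated "other" column instead of raising an error:
      model = modelType(handle_unknown="infrequent", other_categories_name="other")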

      Notes:

      • data must be numerical
      • the label should be an n-records by n-dimensions matrix (e.g. one-hot-encoded data for classification), where the output columns should be interpreted as the probabilities for each category.

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load NeuralNetworkClassifier pkg = "BetaML" verbosity=0
      +BetaML.Nn.NeuralNetworkClassifier
      +
      +julia> layers      = [BetaML.DenseLayer(4,8,f=BetaML.relu),BetaML.DenseLayer(8,8,f=BetaML.relu),BetaML.DenseLayer(8,3,f=BetaML.relu),BetaML.VectorFunctionLayer(3,f=BetaML.softmax)];
      +
      +julia> model       = modelType(layers=layers,opt_alg=BetaML.ADAM())
      +NeuralNetworkClassifier(
      +  layers = BetaML.Nn.AbstractLayer[BetaML.Nn.DenseLayer([-0.376173352338049 0.7029289511758696 -0.5589563304592478 -0.21043274001651874; 0.044758889527899415 0.6687689636685921 0.4584331114653877 0.6820506583840453; … ; -0.26546358457167507 -0.28469736227283804 -0.164225549922154 -0.516785639164486; -0.5146043550684141 -0.0699113265130964 0.14959906603941908 -0.053706860039406834], [0.7003943613125758, -0.23990840466587576, -0.23823126271387746, 0.4018101580410387, 0.2274483050356888, -0.564975060667734, 0.1732063297031089, 0.11880299829896945], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.029467850439546583 0.4074661266592745 … 0.36775675246760053 -0.595524555448422; 0.42455597698371306 -0.2458082732997091 … -0.3324220683462514 0.44439454998610595; … ; -0.2890883863364267 -0.10109249362508033 … -0.0602680568207582 0.18177278845097555; -0.03432587226449335 -0.4301192922760063 … 0.5646018168286626 0.47269177680892693], [0.13777442835428688, 0.5473306726675433, 0.3781939472904011, 0.24021813428130567, -0.0714779477402877, -0.020386373530818958, 0.5465466618404464, -0.40339790713616525], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([0.6565120540082393 0.7139211611842745 … 0.07809812467915389 -0.49346311403373844; -0.4544472987041656 0.6502667641568863 … 0.43634608676548214 0.7213049952968921; 0.41212264783075303 -0.21993289366360613 … 0.25365007887755064 -0.5664469566269569], [-0.6911986792747682, -0.2149343209329364, -0.6347727539063817], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.VectorFunctionLayer{0}(fill(NaN), 3, 3, BetaML.Utils.softmax, BetaML.Utils.dsoftmax, nothing)], 
      +  loss = BetaML.Utils.crossentropy, 
      +  dloss = BetaML.Utils.dcrossentropy, 
      +  epochs = 100, 
      +  batch_size = 32, 
      +  opt_alg = BetaML.Nn.ADAM(BetaML.Nn.var"#90#93"(), 1.0, 0.9, 0.999, 1.0e-8, BetaML.Nn.Learnable[], BetaML.Nn.Learnable[]), 
      +  shuffle = true, 
      +  descr = "", 
      +  cb = BetaML.Nn.fitting_info, 
      +  categories = nothing, 
      +  handle_unknown = "error", 
      +  other_categories_name = nothing, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, y);
      +
      +julia> fit!(mach);
      +
      +julia> classes_est = predict(mach, X)
      +150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt8, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>0.575, versicolor=>0.213, virginica=>0.213)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.573, versicolor=>0.213, virginica=>0.213)
      + ⋮
      + UnivariateFinite{Multiclass{3}}(setosa=>0.236, versicolor=>0.236, virginica=>0.529)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.254, versicolor=>0.254, virginica=>0.492)
      diff --git a/v0.20.3/models/NeuralNetworkClassifier_MLJFlux/index.html b/v0.20.3/models/NeuralNetworkClassifier_MLJFlux/index.html new file mode 100644 index 000000000..ad988dbdb --- /dev/null +++ b/v0.20.3/models/NeuralNetworkClassifier_MLJFlux/index.html @@ -0,0 +1,22 @@ + +NeuralNetworkClassifier · MLJ

      NeuralNetworkClassifier

      NeuralNetworkClassifier

      A model type for constructing a neural network classifier, based on MLJFlux.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      NeuralNetworkClassifier = @load NeuralNetworkClassifier pkg=MLJFlux

      Do model = NeuralNetworkClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in NeuralNetworkClassifier(builder=...).

      NeuralNetworkClassifier is for training a data-dependent Flux.jl neural network for making probabilistic predictions of a Multiclass or OrderedFactor target, given a table of Continuous features. Users provide a recipe for constructing the network, based on properties of the data that is encountered, by specifying an appropriate builder. See MLJFlux documentation for more on builders.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is either a Matrix or any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). If X is a Matrix, it is assumed to have columns corresponding to features and rows corresponding to observations.
      • y is the target, which can be any AbstractVector whose element scitype is Multiclass or OrderedFactor; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyper-parameters

      • builder=MLJFlux.Short(): An MLJFlux builder that constructs a neural network. Possible builders include: MLJFlux.Linear, MLJFlux.Short, and MLJFlux.MLP. See MLJFlux.jl documentation for examples of user-defined builders. See also finaliser below.

      • optimiser::Flux.Adam(): A Flux.Optimise optimiser. The optimiser performs the updating of the weights of the network. For further reference, see the Flux optimiser documentation. To choose a learning rate (the update rate of the optimizer), a good rule of thumb is to start out at 10e-3, and tune using powers of 10 between 1 and 1e-7.

      • loss=Flux.crossentropy: The loss function which the network will optimize. Should be a function which can be called in the form loss(yhat, y). Possible loss functions are listed in the Flux loss function documentation. For a classification task, the most natural loss functions are:

        • Flux.crossentropy: Standard multiclass classification loss, also known as the log loss.
        • Flux.logitcrossentropy: Mathematically equal to crossentropy, but numerically more stable than finalising the outputs with softmax and then calculating crossentropy. You will need to specify finaliser=identity to remove MLJFlux's default softmax finaliser, and understand that the output of predict is then unnormalized (no longer probabilistic); see the sketch after this hyper-parameter list.
        • Flux.tversky_loss: Used with imbalanced data to give more weight to false negatives.
        • Flux.focal_loss: Used with highly imbalanced data. Weights harder examples more than easier examples.

        Currently MLJ measures are not supported as values of loss.

      • epochs::Int=10: The duration of training, in epochs. Typically, one epoch represents one pass through the complete training dataset.

      • batch_size::Int=1: the batch size to be used for training, representing the number of samples per update of the network weights. Typically, batch size is between 8 and 512. Increasing batch size may accelerate training if acceleration=CUDALibs() and a GPU is available.

      • lambda::Float64=0: The strength of the weight regularization penalty. Can be any value in the range [0, ∞).

      • alpha::Float64=0: The L2/L1 mix of regularization, in the range [0, 1]. A value of 0 represents L2 regularization, and a value of 1 represents L1 regularization.

      • rng::Union{AbstractRNG, Int64}: The random number generator or seed used during training.

      • optimizer_changes_trigger_retraining::Bool=false: Defines what happens when re-fitting a machine if the associated optimiser has changed. If true, the associated machine will retrain from scratch on fit! call, otherwise it will not.

      • acceleration::AbstractResource=CPU1(): Defines on what hardware training is done. For Training on GPU, use CUDALibs().

      • finaliser=Flux.softmax: The final activation function of the neural network (applied after the network defined by builder). Defaults to Flux.softmax.
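
      As a minimal sketch of the loss/finaliser combination mentioned above (the network is trained on raw logits, so predictions are unnormalized):

      using MLJ
      import Flux
      NeuralNetworkClassifier = @load NeuralNetworkClassifier pkg=MLJFlux
      clf = NeuralNetworkClassifier(loss=Flux.logitcrossentropy, finaliser=identity)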

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above. Predictions are probabilistic but uncalibrated.
      • predict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • chain: The trained "chain" (Flux.jl model), namely the series of layers, functions, and activations which make up the neural network. This includes the final layer specified by finaliser (eg, softmax).

      Report

      The fields of report(mach) are:

      • training_losses: A vector of training losses (penalised if lambda != 0) in historical order, of length epochs + 1. The first element is the pre-training loss.

      Examples

      In this example we build a classification model using the Iris dataset. This is a very basic example, using a default builder and no standardization. For a more advanced illustration, see NeuralNetworkRegressor or ImageClassifier, and examples in the MLJFlux.jl documentation.

      using MLJ
      +using Flux
      +import RDatasets

      First, we can load the data:

      iris = RDatasets.dataset("datasets", "iris");
      +y, X = unpack(iris, ==(:Species), rng=123); ## a vector and a table
      +NeuralNetworkClassifier = @load NeuralNetworkClassifier pkg=MLJFlux
      +clf = NeuralNetworkClassifier()

      Next, we can train the model:

      mach = machine(clf, X, y)
      +fit!(mach)

      We can train the model in an incremental fashion, altering the learning rate as we go, provided optimizer_changes_trigger_retraining is false (the default). Here, we also change the number of (total) iterations:

      clf.optimiser.eta = clf.optimiser.eta * 2
      +clf.epochs = clf.epochs + 5
      +
      +fit!(mach, verbosity=2) ## trains 5 more epochs

      We can inspect the mean training loss using the cross_entropy function:

      training_loss = cross_entropy(predict(mach, X), y) |> mean

      And we can access the Flux chain (model) using fitted_params:

      chain = fitted_params(mach).chain

      Finally, we can see how the out-of-sample performance changes over time, using MLJ's learning_curve function:

      r = range(clf, :epochs, lower=1, upper=200, scale=:log10)
      +curve = learning_curve(clf, X, y,
      +                     range=r,
      +                     resampling=Holdout(fraction_train=0.7),
      +                     measure=cross_entropy)
      +using Plots
      +plot(curve.parameter_values,
      +     curve.measurements,
      +     xlab=curve.parameter_name,
      +     xscale=curve.parameter_scale,
      +     ylab = "Cross Entropy")
      +

      See also ImageClassifier.

      diff --git a/v0.20.3/models/NeuralNetworkRegressor_BetaML/index.html b/v0.20.3/models/NeuralNetworkRegressor_BetaML/index.html new file mode 100644 index 000000000..335b9894b --- /dev/null +++ b/v0.20.3/models/NeuralNetworkRegressor_BetaML/index.html @@ -0,0 +1,38 @@ + +NeuralNetworkRegressor · MLJ

      NeuralNetworkRegressor

      mutable struct NeuralNetworkRegressor <: MLJModelInterface.Deterministic

      A simple but flexible Feedforward Neural Network, from the Beta Machine Learning Toolkit (BetaML) for regression of a single dimensional target.

      Parameters:

      • layers: Array of layer objects [def: nothing, i.e. basic network]. See subtypes(BetaML.AbstractLayer) for supported layers

      • loss: Loss (cost) function [def: BetaML.squared_cost]. Should always assume y and ŷ as matrices, even if the regression task is 1-D

        Warning

        If you change the parameter loss, you need to either provide its derivative on the parameter dloss or use autodiff with dloss=nothing.

      • dloss: Derivative of the loss function [def: BetaML.dsquared_cost, i.e. use the derivative of the squared cost]. Use nothing for autodiff.

      • epochs: Number of epochs, i.e. passes through the whole training sample [def: 200]

      • batch_size: Size of each individual batch [def: 16]

      • opt_alg: The optimisation algorithm to update the gradient at each batch [def: BetaML.ADAM()]. See subtypes(BetaML.OptimisationAlgorithm) for supported optimizers

      • shuffle: Whether to randomly shuffle the data at each iteration (epoch) [def: true]

      • descr: An optional title and/or description for this model

      • cb: A call back function to provide information during training [def: fitting_info]

      • rng: Random Number Generator (see FIXEDSEED) [default: Random.GLOBAL_RNG]

      Notes:

      • data must be numerical
      • the label should be an n-records vector.

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_boston;
      +
      +julia> modelType   = @load NeuralNetworkRegressor pkg = "BetaML" verbosity=0
      +BetaML.Nn.NeuralNetworkRegressor
      +
      +julia> layers                      = [BetaML.DenseLayer(12,20,f=BetaML.relu),BetaML.DenseLayer(20,20,f=BetaML.relu),BetaML.DenseLayer(20,1,f=BetaML.relu)];
      +
      +julia> model       = modelType(layers=layers,opt_alg=BetaML.ADAM());
      +NeuralNetworkRegressor(
      +  layers = BetaML.Nn.AbstractLayer[BetaML.Nn.DenseLayer([-0.23249759178069676 -0.4125090172711131 … 0.41401934928739 -0.33017881111237535; -0.27912169279319965 0.270551221249931 … 0.19258414323473344 0.1703002982374256; … ; 0.31186742456482447 0.14776438287394805 … 0.3624993442655036 0.1438885872964824; 0.24363744610286758 -0.3221033024934767 … 0.14886090419299408 0.038411663101909355], [-0.42360286004241765, -0.34355377040029594, 0.11510963232946697, 0.29078650404397893, -0.04940236502546075, 0.05142849152316714, -0.177685375947775, 0.3857630523957018, -0.25454667127064756, -0.1726731848206195, 0.29832456225553444, -0.21138505291162835, -0.15763643112604903, -0.08477044513587562, -0.38436681165349196, 0.20538016429104916, -0.25008157754468335, 0.268681800562054, 0.10600581996650865, 0.4262194464325672], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.08534180387478185 0.19659398307677617 … -0.3413633217504578 -0.0484925247381256; 0.0024419192794883915 -0.14614102508129 … -0.21912059923003044 0.2680725396694708; … ; 0.25151545823147886 -0.27532269951606037 … 0.20739970895058063 0.2891938885916349; -0.1699020711688904 -0.1350423717084296 … 0.16947589410758873 0.3629006047373296], [0.2158116357688406, -0.3255582642532289, -0.057314442103850394, 0.29029696770539953, 0.24994080694366455, 0.3624239027782297, -0.30674318230919984, -0.3854738338935017, 0.10809721838554087, 0.16073511121016176, -0.005923262068960489, 0.3157147976348795, -0.10938918304264739, -0.24521229198853187, -0.307167732178712, 0.0808907777008302, -0.014577497150872254, -0.0011287181458157214, 0.07522282588658086, 0.043366500526073104], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.021367697115938555 -0.28326652172347155 … 0.05346175368370165 -0.26037328415871647], [-0.2313659199724562], BetaML.Utils.relu, BetaML.Utils.drelu)], 
      +  loss = BetaML.Utils.squared_cost, 
      +  dloss = BetaML.Utils.dsquared_cost, 
      +  epochs = 100, 
      +  batch_size = 32, 
      +  opt_alg = BetaML.Nn.ADAM(BetaML.Nn.var"#90#93"(), 1.0, 0.9, 0.999, 1.0e-8, BetaML.Nn.Learnable[], BetaML.Nn.Learnable[]), 
      +  shuffle = true, 
      +  descr = "", 
      +  cb = BetaML.Nn.fitting_info, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, y);
      +
      +julia> fit!(mach);
      +
      +julia> ŷ    = predict(mach, X);
      +
      +julia> hcat(y,ŷ)
      +506×2 Matrix{Float64}:
      + 24.0  30.7726
      + 21.6  28.0811
      + 34.7  31.3194
      +  ⋮    
      + 23.9  30.9032
      + 22.0  29.49
      + 11.9  27.2438
      diff --git a/v0.20.3/models/NeuralNetworkRegressor_MLJFlux/index.html b/v0.20.3/models/NeuralNetworkRegressor_MLJFlux/index.html new file mode 100644 index 000000000..30d3fefd6 --- /dev/null +++ b/v0.20.3/models/NeuralNetworkRegressor_MLJFlux/index.html @@ -0,0 +1,45 @@ + +NeuralNetworkRegressor · MLJ

      NeuralNetworkRegressor

      NeuralNetworkRegressor

      A model type for constructing a neural network regressor, based on MLJFlux.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      NeuralNetworkRegressor = @load NeuralNetworkRegressor pkg=MLJFlux

      Do model = NeuralNetworkRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in NeuralNetworkRegressor(builder=...).

      NeuralNetworkRegressor is for training a data-dependent Flux.jl neural network to predict a Continuous target, given a table of Continuous features. Users provide a recipe for constructing the network, based on properties of the data that is encountered, by specifying an appropriate builder. See MLJFlux documentation for more on builders.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is either a Matrix or any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). If X is a Matrix, it is assumed to have columns corresponding to features and rows corresponding to observations.
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyper-parameters

      • builder=MLJFlux.Linear(σ=Flux.relu): An MLJFlux builder that constructs a neural network. Possible builders include: MLJFlux.Linear, MLJFlux.Short, and MLJFlux.MLP. See MLJFlux documentation for more on builders, and the example below for using the @builder convenience macro.

      • optimiser::Flux.Adam(): A Flux.Optimise optimiser. The optimiser performs the updating of the weights of the network. For further reference, see the Flux optimiser documentation. To choose a learning rate (the update rate of the optimizer), a good rule of thumb is to start out at 10e-3, and tune using powers of 10 between 1 and 1e-7.

      • loss=Flux.mse: The loss function which the network will optimize. Should be a function which can be called in the form loss(yhat, y). Possible loss functions are listed in the Flux loss function documentation. For a regression task, natural loss functions are:

        • Flux.mse
        • Flux.mae
        • Flux.msle
        • Flux.huber_loss

        Currently MLJ measures are not supported as loss functions here.

      • epochs::Int=10: The duration of training, in epochs. Typically, one epoch represents one pass through the complete training dataset.

      • batch_size::Int=1: the batch size to be used for training, representing the number of samples per update of the network weights. Typically, batch size is between 8 and 512. Increasing batch size may accelerate training if acceleration=CUDALibs() and a GPU is available.

      • lambda::Float64=0: The strength of the weight regularization penalty. Can be any value in the range [0, ∞).

      • alpha::Float64=0: The L2/L1 mix of regularization, in the range [0, 1]. A value of 0 represents L2 regularization, and a value of 1 represents L1 regularization. See the sketch after this list.

      • rng::Union{AbstractRNG, Int64}: The random number generator or seed used during training.

      • optimizer_changes_trigger_retraining::Bool=false: Defines what happens when re-fitting a machine if the associated optimiser has changed. If true, the associated machine will retrain from scratch on fit! call, otherwise it will not.

      • acceleration::AbstractResource=CPU1(): Defines on what hardware training is done. For training on a GPU, use CUDALibs().
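
      As a sketch of the lambda and alpha options above (the particular values are illustrative assumptions, not recommendations), an elastic-net-style penalty can be specified at construction:

      using MLJ
      +
      +NeuralNetworkRegressor = @load NeuralNetworkRegressor pkg=MLJFlux
      +
      +## lambda sets the overall penalty strength; alpha mixes L2 (alpha=0) and L1 (alpha=1):
      +model = NeuralNetworkRegressor(
      +    lambda=0.1,   ## illustrative strength
      +    alpha=0.5,    ## equal L1/L2 mix
      +    epochs=50,
      +)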

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • chain: The trained "chain" (Flux.jl model), namely the series of layers, functions, and activations which make up the neural network.

      Report

      The fields of report(mach) are:

      • training_losses: A vector of training losses (penalized if lambda != 0) in historical order, of length epochs + 1. The first element is the pre-training loss.

      Examples

      In this example we build a regression model for the Boston house price dataset.

      using MLJ
      +import MLJFlux
      +using Flux

      First, we load in the data: The :MEDV column becomes the target vector y, and all remaining columns go into a table X, with the exception of :CHAS:

      data = OpenML.load(531); ## Loads from https://www.openml.org/d/531
      +y, X = unpack(data, ==(:MEDV), !=(:CHAS); rng=123);
      +
      +scitype(y)
      +schema(X)

      Since MLJFlux models do not handle ordered factors, we'll treat :RAD as Continuous:

      X = coerce(X, :RAD=>Continuous)

      Splitting off a test set:

      (X, Xtest), (y, ytest) = partition((X, y), 0.7, multi=true);

      Next, we can define a builder, making use of a convenience macro to do so. In the following @builder call, n_in is a proxy for the number of input features (which will be known at fit! time) and rng is a proxy for an RNG (which will be passed from the rng field of the model defined below). We also have the parameter n_out, which is the number of output features. As we are doing single-target regression, the value passed will always be 1, but the builder we define will also work for MultitargetNeuralNetworkRegressor.

      builder = MLJFlux.@builder begin
      +    init=Flux.glorot_uniform(rng)
      +    Chain(
      +        Dense(n_in, 64, relu, init=init),
      +        Dense(64, 32, relu, init=init),
      +        Dense(32, n_out, init=init),
      +    )
      +end

      Instantiating a model:

      NeuralNetworkRegressor = @load NeuralNetworkRegressor pkg=MLJFlux
      +model = NeuralNetworkRegressor(
      +    builder=builder,
      +    rng=123,
      +    epochs=20
      +)

      We arrange for standardization of the target by wrapping our model in TransformedTargetModel, and standardization of the features by inserting the wrapped model in a pipeline:

      pipe = Standardizer |> TransformedTargetModel(model, target=Standardizer)

      If we fit with a high verbosity (>1), we will see the losses during training. We can also see the losses in the output of report(mach).

      mach = machine(pipe, X, y)
      +fit!(mach, verbosity=2)
      +
      +## first element initial loss, 2:end per epoch training losses
      +report(mach).transformed_target_model_deterministic.model.training_losses

      Experimenting with learning rate

      We can visually compare how the learning rate affects the predictions:

      using Plots
      +
      +rates = [5e-5, 1e-4, 0.005, 0.001, 0.05]
      +plt=plot()
      +
      +foreach(rates) do η
      +  pipe.transformed_target_model_deterministic.model.optimiser.eta = η
      +  fit!(mach, force=true, verbosity=0)
      +  losses =
      +      report(mach).transformed_target_model_deterministic.model.training_losses[3:end]
      +  plot!(1:length(losses), losses, label=η)
      +end
      +
      +plt
      +
      +pipe.transformed_target_model_deterministic.model.optimiser.eta = 0.0001

      With the learning rate fixed, we compute a CV estimate of the performance (using all data bound to mach) and compare this with performance on the test set:

      ## CV estimate, based on `(X, y)`:
      +evaluate!(mach, resampling=CV(nfolds=5), measure=l2)
      +
      +## loss for `(Xtest, ytest)`:
      +fit!(mach) ## train on `(X, y)`
      +yhat = predict(mach, Xtest)
      +l2(yhat, ytest)  |> mean

      These losses, for the pipeline model, refer to the target on the original, unstandardized, scale.

      For implementing stopping criteria and other iteration controls, refer to examples linked from the MLJFlux documentation.
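
      As a minimal sketch only (continuing the example above, and assuming MLJ's standard IteratedModel wrapper and iteration controls suit your use case), the atomic MLJFlux model can be wrapped so that training stops automatically when the out-of-sample loss stops improving:

      ## wrap the atomic model; training adds one epoch per step and stops after 5
      ## consecutive non-improving steps, or after 100 steps, whichever comes first
      iterated_model = IteratedModel(
      +    model=model,
      +    resampling=Holdout(fraction_train=0.7),
      +    measure=l2,
      +    controls=[Step(1), Patience(5), NumberLimit(100)],
      +)
      +mach2 = machine(iterated_model, X, y) |> fit!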

      See also MultitargetNeuralNetworkRegressor

      diff --git a/v0.20.3/models/NuSVC_LIBSVM/index.html b/v0.20.3/models/NuSVC_LIBSVM/index.html new file mode 100644 index 000000000..770370fdb --- /dev/null +++ b/v0.20.3/models/NuSVC_LIBSVM/index.html @@ -0,0 +1,28 @@ + +NuSVC · MLJ

      NuSVC

      NuSVC

      A model type for constructing a ν-support vector classifier, based on LIBSVM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      NuSVC = @load NuSVC pkg=LIBSVM

      Do model = NuSVC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in NuSVC(kernel=...).

      This model is a re-parameterization of the SVC classifier, where nu replaces cost, and is mathematically equivalent to it. The parameter nu allows more direct control over the number of support vectors (see under "Hyper-parameters").

      This model always predicts actual class labels. For probabilistic predictions, use instead ProbabilisticNuSVC.

      Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): "LIBSVM: a library for support vector machines." ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf.

      Training data

      In MLJ or MLJBase, bind an instance model to data with:

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see "Examples" below).

        • LIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2
        • LIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree
        • LIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))
        • LIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)

        Here gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91.

      • gamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).

      • coef0 = 0.0: kernel parameter (see above)

      • degree::Int32 = Int32(3): degree in polynomial kernel (see above)

      • nu=0.5 (range (0, 1]): An upper bound on the fraction of margin errors and a lower bound of the fraction of support vectors. Denoted ν in the cited paper. Changing nu changes the thickness of the margin (a neighborhood of the decision surface) and a margin error is said to have occurred if a training observation lies on the wrong side of the surface or within the margin. See the tuning sketch after this list.

      • cachesize=200.0 cache memory size in MB

      • tolerance=0.001: tolerance for the stopping criterion

      • shrinking=true: whether to use shrinking heuristics
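
      As a sketch of how nu might be tuned (the grid, resampling strategy, and use of accuracy below are illustrative assumptions, not recommendations):

      using MLJ
      +import LIBSVM
      +
      +NuSVC = @load NuSVC pkg=LIBSVM
      +model = NuSVC()
      +
      +X, y = @load_iris
      +
      +## tune nu over a coarse grid in (0, 1) using 3-fold cross-validation:
      +r = range(model, :nu, lower=0.1, upper=0.9)
      +tuned_model = TunedModel(model=model, range=r, tuning=Grid(resolution=9),
      +                         resampling=CV(nfolds=3), measure=accuracy)
      +mach = machine(tuned_model, X, y) |> fit!
      +fitted_params(mach).best_model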

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • libsvm_model: the trained model object created by the LIBSVM.jl package
      • encoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation

      Report

      The fields of report(mach) are:

      • gamma: actual value of the kernel parameter gamma used in training

      Examples

      Using a built-in kernel

      using MLJ
      +import LIBSVM
      +
      +NuSVC = @load NuSVC pkg=LIBSVM                 ## model type
      +model = NuSVC(kernel=LIBSVM.Kernel.Polynomial) ## instance
      +
      +X, y = @load_iris ## table, vector
      +mach = machine(model, X, y) |> fit!
      +
      +Xnew = (sepal_length = [6.4, 7.2, 7.4],
      +        sepal_width = [2.8, 3.0, 2.8],
      +        petal_length = [5.6, 5.8, 6.1],
      +        petal_width = [2.1, 1.6, 1.9],)
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:
      + "virginica"
      + "virginica"
      + "virginica"

      User-defined kernels

      k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`
      +model = NuSVC(kernel=k)
      +mach = machine(model, X, y) |> fit!
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:
      + "virginica"
      + "virginica"
      + "virginica"

      See also the classifiers SVC and LinearSVC, LIBSVM.jl, and the original C implementation documentation.

      diff --git a/v0.20.3/models/NuSVR_LIBSVM/index.html b/v0.20.3/models/NuSVR_LIBSVM/index.html new file mode 100644 index 000000000..f11c4324c --- /dev/null +++ b/v0.20.3/models/NuSVR_LIBSVM/index.html @@ -0,0 +1,25 @@ + +NuSVR · MLJ

      NuSVR

      NuSVR

      A model type for constructing a ν-support vector regressor, based on LIBSVM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      NuSVR = @load NuSVR pkg=LIBSVM

      Do model = NuSVR() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in NuSVR(kernel=...).

      Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): "LIBSVM: a library for support vector machines." ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf.

      This model is a re-parameterization of EpsilonSVR in which the epsilon hyper-parameter is replaced with a new parameter nu (denoted $ν$ in the cited reference) which attempts to control the number of support vectors directly.

      Training data

      In MLJ or MLJBase, bind an instance model to data with:

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see "Examples" below).

        • LIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2
        • LIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree
        • LIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))
        • LIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)

        Here gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91.

      • gamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).

      • coef0 = 0.0: kernel parameter (see above)

      • degree::Int32 = Int32(3): degree in polynomial kernel (see above)

      • cost=1.0 (range (0, Inf)): the parameter denoted $C$ in the cited reference; for greater regularization, decrease cost

      • nu=0.5 (range (0, 1]): An upper bound on the fraction of training errors and a lower bound of the fraction of support vectors. Denoted $ν$ in the cited paper. Changing nu changes the thickness of some neighborhood of the graph of the prediction function ("tube" or "slab") and a training error is said to occur when a data point (x, y) lies outside of that neighborhood.

      • cachesize=200.0 cache memory size in MB

      • tolerance=0.001: tolerance for the stopping criterion

      • shrinking=true: whether to use shrinking heuristics

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • libsvm_model: the trained model object created by the LIBSVM.jl package

      Report

      The fields of report(mach) are:

      • gamma: actual value of the kernel parameter gamma used in training

      Examples

      Using a built-in kernel

      using MLJ
      +import LIBSVM
      +
      +NuSVR = @load NuSVR pkg=LIBSVM                 ## model type
      +model = NuSVR(kernel=LIBSVM.Kernel.Polynomial) ## instance
      +
      +X, y = make_regression(rng=123) ## table, vector
      +mach = machine(model, X, y) |> fit!
      +
      +Xnew, _ = make_regression(3, rng=123)
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element Vector{Float64}:
      +  0.2008156459920009
      +  0.1131520519131709
      + -0.2076156254934889

      User-defined kernels

      k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`
      +model = NuSVR(kernel=k)
      +mach = machine(model, X, y) |> fit!
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element Vector{Float64}:
      +  1.1211558175964662
      +  0.06677125944808422
      + -0.6817578942749346

      See also EpsilonSVR, LIBSVM.jl and the original C implementation documentation.

      diff --git a/v0.20.3/models/OCSVMDetector_OutlierDetectionPython/index.html b/v0.20.3/models/OCSVMDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..1776e37fd --- /dev/null +++ b/v0.20.3/models/OCSVMDetector_OutlierDetectionPython/index.html @@ -0,0 +1,11 @@ + +OCSVMDetector · MLJ diff --git a/v0.20.3/models/OPTICS_MLJScikitLearnInterface/index.html b/v0.20.3/models/OPTICS_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..5d45c0d05 --- /dev/null +++ b/v0.20.3/models/OPTICS_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +OPTICS · MLJ

      OPTICS

      OPTICS

      A model type for constructing an OPTICS clustering model, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      OPTICS = @load OPTICS pkg=MLJScikitLearnInterface

      Do model = OPTICS() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OPTICS(min_samples=...).

      OPTICS (Ordering Points To Identify the Clustering Structure), closely related to DBSCAN, finds core samples of high density and expands clusters from them. Unlike DBSCAN, it keeps the cluster hierarchy for a variable neighborhood radius, and is better suited than the current scikit-learn implementation of DBSCAN for use on large datasets.
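
      This page gives no example. The following minimal sketch shows only the generic MLJ workflow, and assumes OPTICS is bound to a table of Continuous features in the usual unsupervised way; consult the interface documentation for the operations actually supported:

      using MLJ
      +
      +OPTICS = @load OPTICS pkg=MLJScikitLearnInterface
      +model = OPTICS(min_samples=5)   ## illustrative value
      +
      +X, _ = @load_iris
      +
      +## assumption: bound like any unsupervised MLJ model; the clustering can then be
      +## inspected via the machine's fitted parameters and report
      +mach = machine(model, X) |> fit!
      +fitted_params(mach)
      +report(mach)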

      diff --git a/v0.20.3/models/OneClassSVM_LIBSVM/index.html b/v0.20.3/models/OneClassSVM_LIBSVM/index.html new file mode 100644 index 000000000..6f51a146f --- /dev/null +++ b/v0.20.3/models/OneClassSVM_LIBSVM/index.html @@ -0,0 +1,67 @@ + +OneClassSVM · MLJ

      OneClassSVM

      OneClassSVM

      A model type for constructing a one-class support vector machine, based on LIBSVM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      OneClassSVM = @load OneClassSVM pkg=LIBSVM

      Do model = OneClassSVM() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OneClassSVM(kernel=...).

      Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): "LIBSVM: a library for support vector machines." ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf.

      This model is an outlier detection model delivering raw scores based on the decision function of a support vector machine. Like the NuSVC classifier, it uses the nu re-parameterization of the cost parameter appearing in standard support vector classification SVC.

      To extract normalized scores ("probabilities") wrap the model using ProbabilisticDetector from OutlierDetection.jl. For threshold-based classification, wrap the probabilistic model using MLJ's BinaryThresholdPredictor. Examples of wrapping appear below.

      Training data

      In MLJ or MLJBase, bind an instance model to data with:

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see "Examples" below).

        • LIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2
        • LIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree
        • LIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))
        • LIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)

        Here gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91.

      • gamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).

      • coef0 = 0.0: kernel parameter (see above)

      • degree::Int32 = Int32(3): degree in polynomial kernel (see above)

      • nu=0.5 (range (0, 1]): An upper bound on the fraction of margin errors and a lower bound of the fraction of support vectors. Denoted ν in the cited paper. Changing nu changes the thickness of the margin (a neighborhood of the decision surface) and a margin error is said to have occurred if a training observation lies on the wrong side of the surface or within the margin.

      • cachesize=200.0 cache memory size in MB

      • tolerance=0.001: tolerance for the stopping criterion

      • shrinking=true: whether to use shrinking heuristics

      Operations

      • transform(mach, Xnew): return scores for outlierness, given features Xnew having the same scitype as X above. The greater the score, the more likely it is an outlier. This score is based on the SVM decision function. For normalized scores, wrap model using ProbabilisticDetector from OutlierDetection.jl and call predict instead, and for threshold-based classification, wrap again using BinaryThresholdPredictor. See the examples below.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • libsvm_model: the trained model object created by the LIBSVM.jl package
      • orientation: this equals 1 if the decision function for libsvm_model is increasing with increasing outlierness, and -1 if it is decreasing instead. Correspondingly, the libsvm_model attaches true to outliers in the first case, and false in the second. (The scores given in the MLJ report and generated by MLJ.transform already correct for this ambiguity, which is therefore only an issue for users directly accessing libsvm_model.)

      Report

      The fields of report(mach) are:

      • gamma: actual value of the kernel parameter gamma used in training

      Examples

      Generating raw scores for outlierness

      using MLJ
      +import LIBSVM
      +import StableRNGs.StableRNG
      +
      +OneClassSVM = @load OneClassSVM pkg=LIBSVM           ## model type
      +model = OneClassSVM(kernel=LIBSVM.Kernel.Polynomial) ## instance
      +
      +rng = StableRNG(123)
      +Xmatrix = randn(rng, 5, 3)
      +Xmatrix[1, 1] = 100.0
      +X = MLJ.table(Xmatrix)
      +
      +mach = machine(model, X) |> fit!
      +
      +## training scores (outliers have larger scores):
      +julia> report(mach).scores
      +5-element Vector{Float64}:
      +  6.711689156091755e-7
      + -6.740101976655081e-7
      + -6.711632439648446e-7
      + -6.743015858874887e-7
      + -6.745393717880104e-7
      +
      +## scores for new data:
      +Xnew = MLJ.table(rand(rng, 2, 3))
      +
      +julia> transform(mach, Xnew)
      +2-element Vector{Float64}:
      + -6.746293022511047e-7
      + -6.744289265348623e-7

      Generating probabilistic predictions of outlierness

      Continuing the previous example:

      using OutlierDetection
      +pmodel = ProbabilisticDetector(model)
      +pmach = machine(pmodel, X) |> fit!
      +
      +## probabilistic predictions on new data:
      +
      +julia> y_prob = predict(pmach, Xnew)
      +2-element UnivariateFiniteVector{OrderedFactor{2}, String, UInt8, Float64}:
      + UnivariateFinite{OrderedFactor{2}}(normal=>1.0, outlier=>9.57e-5)
      + UnivariateFinite{OrderedFactor{2}}(normal=>1.0, outlier=>0.0)
      +
      +## probabilities for outlierness:
      +
      +julia> pdf.(y_prob, "outlier")
      +2-element Vector{Float64}:
      + 9.572583265925801e-5
      + 0.0
      +
      +## raw scores are still available using `transform`:
      +
      +julia> transform(pmach, Xnew)
      +2-element Vector{Float64}:
      + 9.572583265925801e-5
      + 0.0

      Outlier classification using a probability threshold:

      Continuing the previous example:

      dmodel = BinaryThresholdPredictor(pmodel, threshold=0.9)
      +dmach = machine(dmodel, X) |> fit!
      +
      +julia> yhat = predict(dmach, Xnew)
      +2-element CategoricalArrays.CategoricalArray{String,1,UInt8}:
      + "normal"
      + "normal"

      User-defined kernels

      Continuing the first example:

      k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`
      +model = OneClassSVM(kernel=k)
      +mach = machine(model, X) |> fit!
      +
      +julia> yhat = transform(mach, Xnew)
      +2-element Vector{Float64}:
      + -0.4825363352732942
      + -0.4848772169720227

      See also LIBSVM.jl and the original C implementation documentation. For an alternative source of outlier detection models with an MLJ interface, see OutlierDetection.jl.

      diff --git a/v0.20.3/models/OneHotEncoder_MLJModels/index.html b/v0.20.3/models/OneHotEncoder_MLJModels/index.html new file mode 100644 index 000000000..fc8de733f --- /dev/null +++ b/v0.20.3/models/OneHotEncoder_MLJModels/index.html @@ -0,0 +1,34 @@ + +OneHotEncoder · MLJ

      OneHotEncoder

      OneHotEncoder

      A model type for constructing a one-hot encoder, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      OneHotEncoder = @load OneHotEncoder pkg=MLJModels

      Do model = OneHotEncoder() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OneHotEncoder(features=...).

      Use this model to one-hot encode the Multiclass and OrderedFactor features (columns) of some table, leaving other columns unchanged.

      New data to be transformed may lack features present in the fit data, but no new features can be present.

      Warning: This transformer assumes that levels(col) for any Multiclass or OrderedFactor column, col, is the same for training data and new data to be transformed.

      To ensure all features are transformed into Continuous features, or dropped, use ContinuousEncoder instead.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any Tables.jl compatible table. Columns can be of mixed type but only those with element scitype Multiclass or OrderedFactor can be encoded. Check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • features: a vector of symbols (column names). If empty (default) then all Multiclass and OrderedFactor features are encoded. Otherwise, encoding is further restricted to the specified features (ignore=false) or the unspecified features (ignore=true). This default behavior can be modified by the ordered_factor flag.
      • ordered_factor=false: when true, OrderedFactor features are universally excluded
      • drop_last=true: whether to drop the column corresponding to the final class of encoded features. For example, a three-class feature is spawned into three new features if drop_last=false, but just two features otherwise.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • all_features: names of all features encountered in training
      • fitted_levels_given_feature: dictionary of the levels associated with each feature encoded, keyed on the feature name
      • ref_name_pairs_given_feature: dictionary of pairs r => ftr (such as 0x00000001 => :grad__A) where r is a CategoricalArrays.jl reference integer representing a level, and ftr the corresponding new feature name; the dictionary is keyed on the names of features that are encoded

      Report

      The fields of report(mach) are:

      • features_to_be_encoded: names of input features to be encoded
      • new_features: names of all output features

      Example

      using MLJ
      +
      +X = (name=categorical(["Danesh", "Lee", "Mary", "John"]),
      +     grade=categorical(["A", "B", "A", "C"], ordered=true),
      +     height=[1.85, 1.67, 1.5, 1.67],
      +     n_devices=[3, 2, 4, 3])
      +
      +julia> schema(X)
      +┌───────────┬──────────────────┐
      +│ names     │ scitypes         │
      +├───────────┼──────────────────┤
      +│ name      │ Multiclass{4}    │
      +│ grade     │ OrderedFactor{3} │
      +│ height    │ Continuous       │
      +│ n_devices │ Count            │
      +└───────────┴──────────────────┘
      +
      +hot = OneHotEncoder(drop_last=true)
      +mach = fit!(machine(hot, X))
      +W = transform(mach, X)
      +
      +julia> schema(W)
      +┌──────────────┬────────────┐
      +│ names        │ scitypes   │
      +├──────────────┼────────────┤
      +│ name__Danesh │ Continuous │
      +│ name__John   │ Continuous │
      +│ name__Lee    │ Continuous │
      +│ grade__A     │ Continuous │
      +│ grade__B     │ Continuous │
      +│ height       │ Continuous │
      +│ n_devices    │ Count      │
      +└──────────────┴────────────┘

      See also ContinuousEncoder.

      diff --git a/v0.20.3/models/OneRuleClassifier_OneRule/index.html b/v0.20.3/models/OneRuleClassifier_OneRule/index.html new file mode 100644 index 000000000..f39f4a517 --- /dev/null +++ b/v0.20.3/models/OneRuleClassifier_OneRule/index.html @@ -0,0 +1,30 @@ + +OneRuleClassifier · MLJ

      OneRuleClassifier

      OneRuleClassifier

      A model type for constructing a one rule classifier, based on OneRule.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      OneRuleClassifier = @load OneRuleClassifier pkg=OneRule

      Do model = OneRuleClassifier() to construct an instance with default hyper-parameters.

      OneRuleClassifier implements the OneRule method for classification by Robert Holte ("Very simple classification rules perform well on most commonly used datasets" in: Machine Learning 11.1 (1993), pp. 63-90).

      For more information see:
      +
      +- Witten, Ian H., Eibe Frank, and Mark A. Hall. 
      +  Data Mining Practical Machine Learning Tools and Techniques Third Edition. 
      +  Morgan Kaufmann, 2017, pp. 93-96.
      +- [Machine Learning - (One|Simple) Rule](https://datacadamia.com/data_mining/one_rule)
      +- [OneRClassifier - One Rule for Classification](http://rasbt.github.io/mlxtend/user_guide/classifier/OneRClassifier/)

      Training data

      In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Multiclass, OrderedFactor, or <:Finite; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      This classifier has no hyper-parameters.

      Operations

      • predict(mach, Xnew): return (deterministic) predictions of the target given features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • tree: the tree (a OneTree) returned by the core OneTree.jl algorithm
      • all_classes: all classes (i.e. levels) of the target (used also internally to transfer levels-information to predict)

      Report

      The fields of report(mach) are:

      • tree: The OneTree created based on the training data
      • nrules: The number of rules tree contains
      • error_rate: fraction of wrongly classified instances
      • error_count: number of wrongly classified instances
      • classes_seen: list of target classes actually observed in training
      • features: the names of the features encountered in training

      Examples

      using MLJ
      +
      +ORClassifier = @load OneRuleClassifier pkg=OneRule
      +
      +orc = ORClassifier()
      +
      +outlook = ["sunny", "sunny", "overcast", "rainy", "rainy", "rainy", "overcast", "sunny", "sunny", "rainy",  "sunny", "overcast", "overcast", "rainy"]
      +temperature = ["hot", "hot", "hot", "mild", "cool", "cool", "cool", "mild", "cool", "mild", "mild", "mild", "hot", "mild"]
      +humidity = ["high", "high", "high", "high", "normal", "normal", "normal", "high", "normal", "normal", "normal", "high", "normal", "high"]
      +windy = ["false", "true", "false", "false", "false", "true", "true", "false", "false", "false", "true", "true", "false", "true"]
      +
      +weather_data = (outlook = outlook, temperature = temperature, humidity = humidity, windy = windy)
      +play_data = ["no", "no", "yes", "yes", "yes", "no", "yes", "no", "yes", "yes", "yes", "yes", "yes", "no"]
      +
      +weather = coerce(weather_data, Textual => Multiclass)
      +play = coerce(play_data, Multiclass)
      +
      +mach = machine(orc, weather, play)
      +fit!(mach)
      +
      +yhat = MLJ.predict(mach, weather)       ## in a real context 'new' `weather` data would be used
      +one_tree = fitted_params(mach).tree
      +report(mach).error_rate

      See also OneRule.jl.

      diff --git a/v0.20.3/models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..f1cbb3d94 --- /dev/null +++ b/v0.20.3/models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +OrthogonalMatchingPursuitCVRegressor · MLJ

      OrthogonalMatchingPursuitCVRegressor

      OrthogonalMatchingPursuitCVRegressor

      A model type for constructing an orthogonal matching pursuit (OMP) model with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      OrthogonalMatchingPursuitCVRegressor = @load OrthogonalMatchingPursuitCVRegressor pkg=MLJScikitLearnInterface

      Do model = OrthogonalMatchingPursuitCVRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OrthogonalMatchingPursuitCVRegressor(copy=...).

      Hyper-parameters

      • copy = true
      • fit_intercept = true
      • normalize = false
      • max_iter = nothing
      • cv = 5
      • n_jobs = 1
      • verbose = false
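
      This page lists hyper-parameters only. A minimal usage sketch, on synthetic data and otherwise default settings (purely illustrative):

      using MLJ
      +
      +OrthogonalMatchingPursuitCVRegressor = @load OrthogonalMatchingPursuitCVRegressor pkg=MLJScikitLearnInterface
      +model = OrthogonalMatchingPursuitCVRegressor(cv=5)
      +
      +X, y = make_regression(100, 10; rng=123)   ## synthetic table and target
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)
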
      diff --git a/v0.20.3/models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..077f353c5 --- /dev/null +++ b/v0.20.3/models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +OrthogonalMatchingPursuitRegressor · MLJ

      OrthogonalMatchingPursuitRegressor

      OrthogonalMatchingPursuitRegressor

      A model type for constructing an orthogonal matching pursuit regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      OrthogonalMatchingPursuitRegressor = @load OrthogonalMatchingPursuitRegressor pkg=MLJScikitLearnInterface

      Do model = OrthogonalMatchingPursuitRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OrthogonalMatchingPursuitRegressor(n_nonzero_coefs=...).

      Hyper-parameters

      • n_nonzero_coefs = nothing
      • tol = nothing
      • fit_intercept = true
      • normalize = false
      • precompute = auto
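
      This page lists hyper-parameters only. A minimal usage sketch, on synthetic data (the sparsity level shown is an arbitrary illustrative choice):

      using MLJ
      +
      +OrthogonalMatchingPursuitRegressor = @load OrthogonalMatchingPursuitRegressor pkg=MLJScikitLearnInterface
      +model = OrthogonalMatchingPursuitRegressor(n_nonzero_coefs=3)   ## illustrative value
      +
      +X, y = make_regression(100, 10; rng=123)
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)
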
      diff --git a/v0.20.3/models/PCADetector_OutlierDetectionPython/index.html b/v0.20.3/models/PCADetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..5e4215d40 --- /dev/null +++ b/v0.20.3/models/PCADetector_OutlierDetectionPython/index.html @@ -0,0 +1,11 @@ + +PCADetector · MLJ diff --git a/v0.20.3/models/PCA_MultivariateStats/index.html b/v0.20.3/models/PCA_MultivariateStats/index.html new file mode 100644 index 000000000..ccc400380 --- /dev/null +++ b/v0.20.3/models/PCA_MultivariateStats/index.html @@ -0,0 +1,11 @@ + +PCA · MLJ

      PCA

      PCA

      A model type for constructing a PCA model, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      PCA = @load PCA pkg=MultivariateStats

      Do model = PCA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in PCA(maxoutdim=...).

      Principal component analysis learns a linear projection onto a lower dimensional space while preserving most of the initial variance seen in the training data.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • maxoutdim=0: Together with variance_ratio, controls the output dimension outdim chosen by the model. Specifically, suppose that k is the smallest integer such that retaining the k most significant principal components accounts for variance_ratio of the total variance in the training data. Then outdim = min(k, maxoutdim). If maxoutdim=0 (default) then the effective maxoutdim is min(n, indim - 1) where n is the number of observations and indim the number of features in the training data.

      • variance_ratio::Float64=0.99: The ratio of variance preserved after the transformation

      • method=:auto: The method to use to solve the problem. Choices are

        • :svd: Singular value decomposition of the matrix.
        • :cov: Covariance matrix decomposition.
        • :auto: Use :cov if the matrix's first dimension is smaller than its second dimension, and otherwise use :svd.
      • mean=nothing: if nothing, centering will be computed and applied; if set to 0, no centering is applied (the data is assumed pre-centered); if a vector is passed, the centering is done with that vector.

      Operations

      • transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.
      • inverse_transform(mach, Xsmall): For a dimension-reduced table Xsmall, such as returned by transform, reconstruct a table, having the same number of columns as the original training data X, that transforms to Xsmall. Mathematically, inverse_transform is a right-inverse for the PCA projection map, whose image is orthogonal to the kernel of that map. In particular, if Xsmall = transform(mach, Xnew), then inverse_transform(Xsmall) is only an approximation to Xnew.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • projection: Returns the projection matrix, which has size (indim, outdim), where indim and outdim are the number of features of the input and output respectively.

      Report

      The fields of report(mach) are:

      • indim: Dimension (number of columns) of the training data and new data to be transformed.
      • outdim = min(n, indim, maxoutdim) is the output dimension; here n is the number of observations.
      • tprincipalvar: Total variance of the principal components.
      • tresidualvar: Total residual variance.
      • tvar: Total observation variance (principal + residual variance).
      • mean: The mean of the untransformed training data, of length indim.
      • principalvars: The variance of the principal components. An AbstractVector of length outdim
      • loadings: The model's loadings, weights for each variable used when calculating principal components. A matrix of size (indim, outdim) where indim and outdim are as defined above.

      Examples

      using MLJ
      +
      +PCA = @load PCA pkg=MultivariateStats
      +
      +X, y = @load_iris ## a table and a vector
      +
      +model = PCA(maxoutdim=2)
      +mach = machine(model, X) |> fit!
      +
      +Xproj = transform(mach, X)
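
      Continuing the example, the operations and accessors documented above can be used to reconstruct an approximation of the original features and to inspect the fitted projection:

      Xapprox = inverse_transform(mach, Xproj)   ## approximate reconstruction of X
      +
      +fitted_params(mach).projection              ## (indim, outdim) projection matrix
      +report(mach).principalvars                  ## variances of the retained components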

      See also KernelPCA, ICA, FactorAnalysis, PPCA

      diff --git a/v0.20.3/models/PLSRegressor_PartialLeastSquaresRegressor/index.html b/v0.20.3/models/PLSRegressor_PartialLeastSquaresRegressor/index.html new file mode 100644 index 000000000..dc790a963 --- /dev/null +++ b/v0.20.3/models/PLSRegressor_PartialLeastSquaresRegressor/index.html @@ -0,0 +1,2 @@ + +PLSRegressor · MLJ diff --git a/v0.20.3/models/PPCA_MultivariateStats/index.html b/v0.20.3/models/PPCA_MultivariateStats/index.html new file mode 100644 index 000000000..84f48c770 --- /dev/null +++ b/v0.20.3/models/PPCA_MultivariateStats/index.html @@ -0,0 +1,11 @@ + +PPCA · MLJ

      PPCA

      PPCA

      A model type for constructing a probabilistic PCA model, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      PPCA = @load PPCA pkg=MultivariateStats

      Do model = PPCA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in PPCA(maxoutdim=...).

      Probabilistic principal component analysis is a dimension-reduction algorithm which represents a constrained form of the Gaussian distribution in which the number of free parameters can be restricted while still allowing the model to capture the dominant correlations in a data set. It is expressed as the maximum likelihood solution of a probabilistic latent variable model. For details, see C. M. Bishop (2006): Pattern Recognition and Machine Learning.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • maxoutdim=0: Controls the dimension (number of columns) of the output, outdim. Specifically, outdim = min(n, indim, maxoutdim), where n is the number of observations and indim the input dimension.
      • method::Symbol=:ml: The method to use to solve the problem, one of :ml, :em, :bayes.
      • maxiter::Int=1000: The maximum number of iterations.
      • tol::Real=1e-6: The convergence tolerance.
      • mean::Union{Nothing, Real, Vector{Float64}}=nothing: If nothing, centering will be computed and applied; if set to 0 no centering is applied (data is assumed pre-centered); if a vector, the centering is done with that vector.

      Operations

      • transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.
      • inverse_transform(mach, Xsmall): For a dimension-reduced table Xsmall, such as returned by transform, reconstruct a table, having the same number of columns as the original training data X, that transforms to Xsmall. Mathematically, inverse_transform is a right-inverse for the PCA projection map, whose image is orthogonal to the kernel of that map. In particular, if Xsmall = transform(mach, Xnew), then inverse_transform(Xsmall) is only an approximation to Xnew.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • projection: Returns the projection matrix, which has size (indim, outdim), where indim and outdim are the number of features of the input and output respectively. Each column of the projection matrix corresponds to a principal component.

      Report

      The fields of report(mach) are:

      • indim: Dimension (number of columns) of the training data and new data to be transformed.
      • outdim: Dimension of transformed data.
      • tvar: The variance of the components.
      • loadings: The model's loadings matrix. A matrix of size (indim, outdim) where indim and outdim are as defined above.

      Examples

      using MLJ
      +
      +PPCA = @load PPCA pkg=MultivariateStats
      +
      +X, y = @load_iris ## a table and a vector
      +
      +model = PPCA(maxoutdim=2)
      +mach = machine(model, X) |> fit!
      +
      +Xproj = transform(mach, X)

      See also KernelPCA, ICA, FactorAnalysis, PCA

      diff --git a/v0.20.3/models/PassiveAggressiveClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/PassiveAggressiveClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..7370c1817 --- /dev/null +++ b/v0.20.3/models/PassiveAggressiveClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +PassiveAggressiveClassifier · MLJ

      PassiveAggressiveClassifier

      PassiveAggressiveClassifier

      A model type for constructing a passive aggressive classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      PassiveAggressiveClassifier = @load PassiveAggressiveClassifier pkg=MLJScikitLearnInterface

      Do model = PassiveAggressiveClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in PassiveAggressiveClassifier(C=...).

      Hyper-parameters

      • C = 1.0
      • fit_intercept = true
      • max_iter = 100
      • tol = 0.001
      • early_stopping = false
      • validation_fraction = 0.1
      • n_iter_no_change = 5
      • shuffle = true
      • verbose = 0
      • loss = hinge
      • n_jobs = nothing
      • random_state = 0
      • warm_start = false
      • class_weight = nothing
      • average = false
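
      This page lists hyper-parameters only. A minimal usage sketch (the hyper-parameter values shown are arbitrary illustrative choices):

      using MLJ
      +
      +PassiveAggressiveClassifier = @load PassiveAggressiveClassifier pkg=MLJScikitLearnInterface
      +model = PassiveAggressiveClassifier(C=0.5, max_iter=500)   ## illustrative values
      +
      +X, y = @load_iris
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)
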
      diff --git a/v0.20.3/models/PassiveAggressiveRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/PassiveAggressiveRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..d91efae22 --- /dev/null +++ b/v0.20.3/models/PassiveAggressiveRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +PassiveAggressiveRegressor · MLJ

      PassiveAggressiveRegressor

      PassiveAggressiveRegressor

      A model type for constructing a passive aggressive regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      PassiveAggressiveRegressor = @load PassiveAggressiveRegressor pkg=MLJScikitLearnInterface

      Do model = PassiveAggressiveRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in PassiveAggressiveRegressor(C=...).

      Hyper-parameters

      • C = 1.0
      • fit_intercept = true
      • max_iter = 1000
      • tol = 0.0001
      • early_stopping = false
      • validation_fraction = 0.1
      • n_iter_no_change = 5
      • shuffle = true
      • verbose = 0
      • loss = epsilon_insensitive
      • epsilon = 0.1
      • random_state = nothing
      • warm_start = false
      • average = false
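
      This page lists hyper-parameters only. A minimal usage sketch on synthetic data (the hyper-parameter values shown are arbitrary illustrative choices):

      using MLJ
      +
      +PassiveAggressiveRegressor = @load PassiveAggressiveRegressor pkg=MLJScikitLearnInterface
      +model = PassiveAggressiveRegressor(C=1.0, epsilon=0.1)   ## illustrative values
      +
      +X, y = make_regression(100, 5; rng=123)
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)
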
      diff --git a/v0.20.3/models/PegasosClassifier_BetaML/index.html b/v0.20.3/models/PegasosClassifier_BetaML/index.html new file mode 100644 index 000000000..f58e16dd0 --- /dev/null +++ b/v0.20.3/models/PegasosClassifier_BetaML/index.html @@ -0,0 +1,31 @@ + +PegasosClassifier · MLJ

      PegasosClassifier

      mutable struct PegasosClassifier <: MLJModelInterface.Probabilistic

      The gradient-based linear "pegasos" classifier using one-vs-all for multiclass, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

      • initial_coefficients::Union{Nothing, Matrix{Float64}}: N-classes by D-dimensions matrix of initial linear coefficients [def: nothing, i.e. zeros]
• initial_constant::Union{Nothing, Vector{Float64}}: N-classes vector of initial constant terms [def: nothing, i.e. zeros]
      • learning_rate::Function: Learning rate [def: (epoch -> 1/sqrt(epoch))]
      • learning_rate_multiplicative::Float64: Multiplicative term of the learning rate [def: 0.5]
• epochs::Int64: Maximum number of epochs, i.e. passes through the whole training sample [def: 1000]
      • shuffle::Bool: Whether to randomly shuffle the data at each iteration (epoch) [def: true]
      • force_origin::Bool: Whether to force the parameter associated with the constant term to remain zero [def: false]
      • return_mean_hyperplane::Bool: Whether to return the average hyperplane coefficients instead of the final ones [def: false]
• rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load PegasosClassifier pkg = "BetaML" verbosity=0
      +BetaML.Perceptron.PegasosClassifier
      +
      +julia> model       = modelType()
      +PegasosClassifier(
      +  initial_coefficients = nothing, 
      +  initial_constant = nothing, 
      +  learning_rate = BetaML.Perceptron.var"#71#73"(), 
      +  learning_rate_multiplicative = 0.5, 
      +  epochs = 1000, 
      +  shuffle = true, 
      +  force_origin = false, 
      +  return_mean_hyperplane = false, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, y);
      +
      +julia> fit!(mach);
      +
      +julia> est_classes = predict(mach, X)
      +150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt8, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>0.817, versicolor=>0.153, virginica=>0.0301)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.791, versicolor=>0.177, virginica=>0.0318)
      + ⋮
      + UnivariateFinite{Multiclass{3}}(setosa=>0.254, versicolor=>0.5, virginica=>0.246)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.283, versicolor=>0.51, virginica=>0.207)
      diff --git a/v0.20.3/models/PerceptronClassifier_BetaML/index.html b/v0.20.3/models/PerceptronClassifier_BetaML/index.html new file mode 100644 index 000000000..4cbb6f9bf --- /dev/null +++ b/v0.20.3/models/PerceptronClassifier_BetaML/index.html @@ -0,0 +1,32 @@ + +PerceptronClassifier · MLJ

      PerceptronClassifier

      mutable struct PerceptronClassifier <: MLJModelInterface.Probabilistic

      The classical perceptron algorithm using one-vs-all for multiclass, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

      • initial_coefficients::Union{Nothing, Matrix{Float64}}: N-classes by D-dimensions matrix of initial linear coefficients [def: nothing, i.e. zeros]
• initial_constant::Union{Nothing, Vector{Float64}}: N-classes vector of initial constant terms [def: nothing, i.e. zeros]
• epochs::Int64: Maximum number of epochs, i.e. passes through the whole training sample [def: 1000]
      • shuffle::Bool: Whether to randomly shuffle the data at each iteration (epoch) [def: true]
      • force_origin::Bool: Whether to force the parameter associated with the constant term to remain zero [def: false]
      • return_mean_hyperplane::Bool: Whether to return the average hyperplane coefficients instead of the final ones [def: false]
• rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load PerceptronClassifier pkg = "BetaML"
      +[ Info: For silent loading, specify `verbosity=0`. 
      +import BetaML ✔
      +BetaML.Perceptron.PerceptronClassifier
      +
      +julia> model       = modelType()
      +PerceptronClassifier(
      +  initial_coefficients = nothing, 
      +  initial_constant = nothing, 
      +  epochs = 1000, 
      +  shuffle = true, 
      +  force_origin = false, 
      +  return_mean_hyperplane = false, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, y);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(PerceptronClassifier(initial_coefficients = nothing, …), …).
      +*** Avg. error after epoch 2 : 0.0 (all elements of the set has been correctly classified)
      +julia> est_classes = predict(mach, X)
      +150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt8, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>2.53e-34, virginica=>0.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>1.27e-18, virginica=>1.86e-310)
      + ⋮
      + UnivariateFinite{Multiclass{3}}(setosa=>2.77e-57, versicolor=>1.1099999999999999e-82, virginica=>1.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>3.09e-22, versicolor=>4.03e-25, virginica=>1.0)
      diff --git a/v0.20.3/models/PerceptronClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/PerceptronClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..0d18371ab --- /dev/null +++ b/v0.20.3/models/PerceptronClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +PerceptronClassifier · MLJ

      PerceptronClassifier

      PerceptronClassifier

      A model type for constructing a perceptron classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      PerceptronClassifier = @load PerceptronClassifier pkg=MLJScikitLearnInterface

      Do model = PerceptronClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in PerceptronClassifier(penalty=...).

      Hyper-parameters

      • penalty = nothing
      • alpha = 0.0001
      • fit_intercept = true
      • max_iter = 1000
      • tol = 0.001
      • shuffle = true
      • verbose = 0
      • eta0 = 1.0
      • n_jobs = nothing
      • random_state = 0
      • early_stopping = false
      • validation_fraction = 0.1
      • n_iter_no_change = 5
      • class_weight = nothing
      • warm_start = false
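
Example

A minimal usage sketch, following the generic MLJ workflow; the hyper-parameter value and the choice of the iris data are for illustration only:

using MLJ
+PerceptronClassifier = @load PerceptronClassifier pkg=MLJScikitLearnInterface
+model = PerceptronClassifier(alpha=0.001)   ## alpha chosen for illustration only
+X, y = @load_iris                           ## table, vector
+mach = machine(model, X, y) |> fit!
+yhat = predict(mach, X)                     ## predictions on the training data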
      diff --git a/v0.20.3/models/ProbabilisticNuSVC_LIBSVM/index.html b/v0.20.3/models/ProbabilisticNuSVC_LIBSVM/index.html new file mode 100644 index 000000000..730f4e7c8 --- /dev/null +++ b/v0.20.3/models/ProbabilisticNuSVC_LIBSVM/index.html @@ -0,0 +1,30 @@ + +ProbabilisticNuSVC · MLJ

      ProbabilisticNuSVC

      ProbabilisticNuSVC

      A model type for constructing a probabilistic ν-support vector classifier, based on LIBSVM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ProbabilisticNuSVC = @load ProbabilisticNuSVC pkg=LIBSVM

      Do model = ProbabilisticNuSVC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ProbabilisticNuSVC(kernel=...).

      This model is identical to NuSVC with the exception that it predicts probabilities, instead of actual class labels. Probabilities are computed using Platt scaling, which will add to total computation time.

      Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): "LIBSVM: a library for support vector machines." ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf.

      Platt, John (1999): "Probabilistic Outputs for Support Vector Machines and Comparisons to Regularized Likelihood Methods."

      Training data

      In MLJ or MLJBase, bind an instance model to data with:

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see "Examples" below).

        • LIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2
  • LIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree
  • LIBSVM.Kernel.RadialBasis: (x1, x2) -> exp(-gamma*norm(x1 - x2)^2)
  • LIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)

  Here gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91.

      • gamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).

      • coef0 = 0.0: kernel parameter (see above)

      • degree::Int32 = Int32(3): degree in polynomial kernel (see above)

      • nu=0.5 (range (0, 1]): An upper bound on the fraction of margin errors and a lower bound of the fraction of support vectors. Denoted ν in the cited paper. Changing nu changes the thickness of the margin (a neighborhood of the decision surface) and a margin error is said to have occurred if a training observation lies on the wrong side of the surface or within the margin.

• cachesize=200.0: cache memory size in MB

      • tolerance=0.001: tolerance for the stopping criterion

      • shrinking=true: whether to use shrinking heuristics

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • libsvm_model: the trained model object created by the LIBSVM.jl package
      • encoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation

      Report

      The fields of report(mach) are:

      • gamma: actual value of the kernel parameter gamma used in training

      Examples

      Using a built-in kernel

      using MLJ
      +import LIBSVM
      +
      +ProbabilisticNuSVC = @load ProbabilisticNuSVC pkg=LIBSVM    ## model type
      +model = ProbabilisticNuSVC(kernel=LIBSVM.Kernel.Polynomial) ## instance
      +
      +X, y = @load_iris ## table, vector
      +mach = machine(model, X, y) |> fit!
      +
      +Xnew = (sepal_length = [6.4, 7.2, 7.4],
      +        sepal_width = [2.8, 3.0, 2.8],
      +        petal_length = [5.6, 5.8, 6.1],
      +        petal_width = [2.1, 1.6, 1.9],)
      +
      +julia> probs = predict(mach, Xnew)
      +3-element UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>0.00313, versicolor=>0.0247, virginica=>0.972)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.000598, versicolor=>0.0155, virginica=>0.984)
      + UnivariateFinite{Multiclass{3}}(setosa=>2.27e-6, versicolor=>2.73e-6, virginica=>1.0)
      +
      +julia> yhat = mode.(probs)
      +3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:
      + "virginica"
      + "virginica"
      + "virginica"

      User-defined kernels

      k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`
      +model = ProbabilisticNuSVC(kernel=k)
      +mach = machine(model, X, y) |> fit!
      +
      +probs = predict(mach, Xnew)

See also the classifiers NuSVC, SVC, ProbabilisticSVC and LinearSVC. And see LIBSVM.jl and the documentation for the original C implementation.

      diff --git a/v0.20.3/models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..f132f4e8d --- /dev/null +++ b/v0.20.3/models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +ProbabilisticSGDClassifier · MLJ

      ProbabilisticSGDClassifier

      ProbabilisticSGDClassifier

      A model type for constructing a probabilistic sgd classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ProbabilisticSGDClassifier = @load ProbabilisticSGDClassifier pkg=MLJScikitLearnInterface

      Do model = ProbabilisticSGDClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ProbabilisticSGDClassifier(loss=...).

      Hyper-parameters

      • loss = log_loss
      • penalty = l2
      • alpha = 0.0001
      • l1_ratio = 0.15
      • fit_intercept = true
      • max_iter = 1000
      • tol = 0.001
      • shuffle = true
      • verbose = 0
      • epsilon = 0.1
      • n_jobs = nothing
      • random_state = nothing
      • learning_rate = optimal
      • eta0 = 0.0
      • power_t = 0.5
      • early_stopping = false
      • validation_fraction = 0.1
      • n_iter_no_change = 5
      • class_weight = nothing
      • warm_start = false
      • average = false
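
Example

A minimal usage sketch, following the generic MLJ workflow; the hyper-parameter value and the choice of the iris data are for illustration only:

using MLJ
+ProbabilisticSGDClassifier = @load ProbabilisticSGDClassifier pkg=MLJScikitLearnInterface
+model = ProbabilisticSGDClassifier(alpha=0.001)   ## alpha chosen for illustration only
+X, y = @load_iris                                 ## table, vector
+mach = machine(model, X, y) |> fit!
+probs = predict(mach, X)    ## probabilistic predictions
+yhat = mode.(probs)         ## point predictions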
      diff --git a/v0.20.3/models/ProbabilisticSVC_LIBSVM/index.html b/v0.20.3/models/ProbabilisticSVC_LIBSVM/index.html new file mode 100644 index 000000000..c39cf101d --- /dev/null +++ b/v0.20.3/models/ProbabilisticSVC_LIBSVM/index.html @@ -0,0 +1,35 @@ + +ProbabilisticSVC · MLJ

      ProbabilisticSVC

      ProbabilisticSVC

      A model type for constructing a probabilistic C-support vector classifier, based on LIBSVM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ProbabilisticSVC = @load ProbabilisticSVC pkg=LIBSVM

      Do model = ProbabilisticSVC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ProbabilisticSVC(kernel=...).

      This model is identical to SVC with the exception that it predicts probabilities, instead of actual class labels. Probabilities are computed using Platt scaling, which will add to the total computation time.

      Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): "LIBSVM: a library for support vector machines." ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf.

      Platt, John (1999): "Probabilistic Outputs for Support Vector Machines and Comparisons to Regularized Likelihood Methods."

      Training data

      In MLJ or MLJBase, bind an instance model to data with one of:

      mach = machine(model, X, y)
      +mach = machine(model, X, y, w)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)
      • w: a dictionary of class weights, keyed on levels(y).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see "Examples" below).

        • LIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2
  • LIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree
  • LIBSVM.Kernel.RadialBasis: (x1, x2) -> exp(-gamma*norm(x1 - x2)^2)
  • LIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)

  Here gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91.

      • gamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).

      • coef0 = 0.0: kernel parameter (see above)

      • degree::Int32 = Int32(3): degree in polynomial kernel (see above)

• cost=1.0 (range (0, Inf)): the parameter denoted C in the cited reference; for greater regularization, decrease cost

• cachesize=200.0: cache memory size in MB

      • tolerance=0.001: tolerance for the stopping criterion

      • shrinking=true: whether to use shrinking heuristics

      Operations

      • predict(mach, Xnew): return probabilistic predictions of the target given features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • libsvm_model: the trained model object created by the LIBSVM.jl package
      • encoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation

      Report

      The fields of report(mach) are:

      • gamma: actual value of the kernel parameter gamma used in training

      Examples

      Using a built-in kernel

      using MLJ
      +import LIBSVM
      +
      +ProbabilisticSVC = @load ProbabilisticSVC pkg=LIBSVM      ## model type
      +model = ProbabilisticSVC(kernel=LIBSVM.Kernel.Polynomial) ## instance
      +
      +X, y = @load_iris ## table, vector
      +mach = machine(model, X, y) |> fit!
      +
      +Xnew = (sepal_length = [6.4, 7.2, 7.4],
      +        sepal_width = [2.8, 3.0, 2.8],
      +        petal_length = [5.6, 5.8, 6.1],
      +        petal_width = [2.1, 1.6, 1.9],)
      +
      +julia> probs = predict(mach, Xnew)
      +3-element UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>0.00186, versicolor=>0.003, virginica=>0.995)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.000563, versicolor=>0.0554, virginica=>0.944)
      + UnivariateFinite{Multiclass{3}}(setosa=>1.4e-6, versicolor=>1.68e-6, virginica=>1.0)
      +
      +
      +julia> labels = mode.(probs)
      +3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:
      + "virginica"
      + "virginica"
      + "virginica"

      User-defined kernels

      k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`
      +model = ProbabilisticSVC(kernel=k)
      +mach = machine(model, X, y) |> fit!
      +
      +probs = predict(mach, Xnew)

      Incorporating class weights

      In either scenario above, we can do:

      weights = Dict("virginica" => 1, "versicolor" => 20, "setosa" => 1)
      +mach = machine(model, X, y, weights) |> fit!
      +
      +probs = predict(mach, Xnew)

See also the classifiers SVC, NuSVC and LinearSVC, and see LIBSVM.jl and the documentation for the original C implementation.

      diff --git a/v0.20.3/models/QuantileRegressor_MLJLinearModels/index.html b/v0.20.3/models/QuantileRegressor_MLJLinearModels/index.html new file mode 100644 index 000000000..adbc183df --- /dev/null +++ b/v0.20.3/models/QuantileRegressor_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +QuantileRegressor · MLJ

      QuantileRegressor

      QuantileRegressor

      A model type for constructing a quantile regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      QuantileRegressor = @load QuantileRegressor pkg=MLJLinearModels

      Do model = QuantileRegressor() to construct an instance with default hyper-parameters.

      This model coincides with RobustRegressor, with the exception that the robust loss, rho, is fixed to QuantileRho(delta), where delta is a new hyperparameter.

      Different solver options exist, as indicated under "Hyperparameters" below.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

• delta::Real: parameterizes the QuantileRho function, indicating the quantile to use (the default of 0.5 corresponds to median regression). Default: 0.5

      • lambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. Default: 1.0

      • gamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0

      • penalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2

      • fit_intercept::Bool: whether to fit the intercept or not. Default: true

      • penalize_intercept::Bool: whether to penalize the intercept. Default: false

      • scale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true

      • solver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, IWLSCG, if penalty = :l2, and ProxGrad otherwise.

        If solver = nothing (default) then LBFGS() is used, if penalty = :l2, and otherwise ProxGrad(accel=true) (FISTA) is used.

        Solver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) Default: nothing

      Example

      using MLJ
      +X, y = make_regression()
      +mach = fit!(machine(QuantileRegressor(), X, y))
      +predict(mach, X)
      +fitted_params(mach)
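
The delta hyper-parameter selects the target quantile. As a further sketch, reusing the data above (the value 0.9 is chosen purely for illustration):

q90 = QuantileRegressor(delta=0.9)   ## target the 0.9 quantile instead of the median
+mach90 = fit!(machine(q90, X, y))
+predict(mach90, X)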

      See also RobustRegressor, HuberRegressor.

      diff --git a/v0.20.3/models/RANSACRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/RANSACRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..9ca0dec9c --- /dev/null +++ b/v0.20.3/models/RANSACRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +RANSACRegressor · MLJ

      RANSACRegressor

      RANSACRegressor

      A model type for constructing a ransac regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RANSACRegressor = @load RANSACRegressor pkg=MLJScikitLearnInterface

      Do model = RANSACRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RANSACRegressor(estimator=...).

      Hyper-parameters

      • estimator = nothing
      • min_samples = 5
      • residual_threshold = nothing
      • is_data_valid = nothing
      • is_model_valid = nothing
      • max_trials = 100
      • max_skips = 9223372036854775807
      • stop_n_inliers = 9223372036854775807
      • stop_score = Inf
      • stop_probability = 0.99
      • loss = absolute_error
      • random_state = nothing
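
Example

A minimal usage sketch, following the generic MLJ workflow; the hyper-parameter value and the synthetic data are for illustration only:

using MLJ
+RANSACRegressor = @load RANSACRegressor pkg=MLJScikitLearnInterface
+model = RANSACRegressor(max_trials=200)   ## max_trials chosen for illustration only
+X, y = make_regression(100, 2)            ## synthetic table and target
+mach = machine(model, X, y) |> fit!
+yhat = predict(mach, X)                   ## predictions on the training data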
      diff --git a/v0.20.3/models/RODDetector_OutlierDetectionPython/index.html b/v0.20.3/models/RODDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..26b542854 --- /dev/null +++ b/v0.20.3/models/RODDetector_OutlierDetectionPython/index.html @@ -0,0 +1,2 @@ + +RODDetector · MLJ diff --git a/v0.20.3/models/ROSE_Imbalance/index.html b/v0.20.3/models/ROSE_Imbalance/index.html new file mode 100644 index 000000000..d5aaa70fe --- /dev/null +++ b/v0.20.3/models/ROSE_Imbalance/index.html @@ -0,0 +1,30 @@ + +ROSE · MLJ

      ROSE

Initialize a ROSE model with the given hyper-parameters.

      ROSE

      A model type for constructing a rose, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ROSE = @load ROSE pkg=Imbalance

      Do model = ROSE() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ROSE(s=...).

      ROSE implements the ROSE (Random Oversampling Examples) algorithm to correct for class imbalance as in G Menardi, N. Torelli, “Training and assessing classification rules with imbalanced data,” Data Mining and Knowledge Discovery, 28(1), pp.92-122, 2014.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed by model = ROSE()

      Hyperparameters

      • s::float: A parameter that proportionally controls the bandwidth of the Gaussian kernel

      • ratios=1.0: A parameter that controls the amount of oversampling to be done for each class

        • Can be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class
        • Can be a dictionary mapping each class label to the float ratio for that class
• rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.

      Transform Inputs

      • X: A matrix or table of floats where each row is an observation from the dataset
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

• Xover: A matrix or table that includes the original data and the new observations due to oversampling, depending on whether the input X is a matrix or table, respectively
      • yover: An abstract vector of labels corresponding to Xover

      Operations

      • transform(mach, X, y): resample the data X and y using ROSE, returning both the new and original observations

      Example

      using MLJ
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows, num_continuous_feats = 100, 5
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                class_probs, rng=42)  
      +
      +julia> Imbalance.checkbalance(y)
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      +
      +## load ROSE
      +ROSE = @load ROSE pkg=Imbalance
      +
      +## wrap the model in a machine
      +oversampler = ROSE(s=0.3, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)
      +mach = machine(oversampler)
      +
      +## provide the data to transform (there is nothing to fit)
      +Xover, yover = transform(mach, X, y)
      +
      +julia> Imbalance.checkbalance(yover)
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      diff --git a/v0.20.3/models/RandomForestClassifier_BetaML/index.html b/v0.20.3/models/RandomForestClassifier_BetaML/index.html new file mode 100644 index 000000000..5ef9177ff --- /dev/null +++ b/v0.20.3/models/RandomForestClassifier_BetaML/index.html @@ -0,0 +1,31 @@ + +RandomForestClassifier · MLJ

      RandomForestClassifier

      mutable struct RandomForestClassifier <: MLJModelInterface.Probabilistic

      A simple Random Forest model for classification with support for Missing data, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

• n_trees::Int64: Number of (decision) trees in the forest [def: 30]
      • max_depth::Int64: The maximum depth the tree is allowed to reach. When this is reached the node is forced to become a leaf [def: 0, i.e. no limits]
      • min_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]
• min_records::Int64: The minimum number of records a node must hold to be considered for partitioning [def: 2]
• max_features::Int64: The maximum number of (random) features to consider at each partitioning [def: 0, i.e. square root of the data dimensions]
• splitting_criterion::Function: This is the name of the function to be used to compute the information gain of a specific partition. This is done by measuring the difference between the "impurity" of the labels of the parent node and those of the two child nodes, weighted by the respective number of items. [def: gini]. Either gini, entropy or a custom function. It can also be an anonymous function.
• β::Float64: Parameter that regulates the weights of the scoring of each tree, to be (optionally) used in prediction based on the error of the individual trees computed on the records on which trees have not been trained. Higher values favour "better" trees, but too high values will cause overfitting [def: 0, i.e. uniform weights]
• rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]

      Example :

      julia> using MLJ
      +
      +julia> X, y        = @load_iris;
      +
      +julia> modelType   = @load RandomForestClassifier pkg = "BetaML" verbosity=0
      +BetaML.Trees.RandomForestClassifier
      +
      +julia> model       = modelType()
      +RandomForestClassifier(
      +  n_trees = 30, 
      +  max_depth = 0, 
      +  min_gain = 0.0, 
      +  min_records = 2, 
      +  max_features = 0, 
      +  splitting_criterion = BetaML.Utils.gini, 
      +  β = 0.0, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, y);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(RandomForestClassifier(n_trees = 30, …), …).
      +
      +julia> cat_est    = predict(mach, X)
      +150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)
      + ⋮
      + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0667, virginica=>0.933)
      diff --git a/v0.20.3/models/RandomForestClassifier_DecisionTree/index.html b/v0.20.3/models/RandomForestClassifier_DecisionTree/index.html new file mode 100644 index 000000000..d95cd0716 --- /dev/null +++ b/v0.20.3/models/RandomForestClassifier_DecisionTree/index.html @@ -0,0 +1,22 @@ + +RandomForestClassifier · MLJ

      RandomForestClassifier

      RandomForestClassifier

      A model type for constructing a CART random forest classifier, based on DecisionTree.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RandomForestClassifier = @load RandomForestClassifier pkg=DecisionTree

      Do model = RandomForestClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomForestClassifier(max_depth=...).

      RandomForestClassifier implements the standard Random Forest algorithm, originally published in Breiman, L. (2001): "Random Forests.", Machine Learning, vol. 45, pp. 5–32.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyperparameters

      • max_depth=-1: max depth of the decision tree (-1=any)
      • min_samples_leaf=1: min number of samples each leaf needs to have
      • min_samples_split=2: min number of samples needed for a split
      • min_purity_increase=0: min purity needed for a split
      • n_subfeatures=-1: number of features to select at random (0 for all, -1 for square root of number of features)
      • n_trees=10: number of trees to train
• sampling_fraction=0.7: fraction of samples to train each tree on
      • feature_importance: method to use for computing feature importances. One of (:impurity, :split)
      • rng=Random.GLOBAL_RNG: random number generator or seed

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. Predictions are probabilistic, but uncalibrated.
      • predict_mode(mach, Xnew): instead return the mode of each prediction above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • forest: the Ensemble object returned by the core DecisionTree.jl algorithm

      Report

      • features: the names of the features encountered in training

      Accessor functions

      • feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)

      Examples

      using MLJ
      +Forest = @load RandomForestClassifier pkg=DecisionTree
      +forest = Forest(min_samples_split=6, n_subfeatures=3)
      +
      +X, y = @load_iris
      +mach = machine(forest, X, y) |> fit!
      +
      +Xnew = (sepal_length = [6.4, 7.2, 7.4],
      +        sepal_width = [2.8, 3.0, 2.8],
      +        petal_length = [5.6, 5.8, 6.1],
      +        petal_width = [2.1, 1.6, 1.9],)
      +yhat = predict(mach, Xnew) ## probabilistic predictions
      +predict_mode(mach, Xnew)   ## point predictions
+pdf.(yhat, "virginica")    ## probabilities for the "virginica" class
      +
+fitted_params(mach).forest ## raw `Ensemble` object from DecisionTree.jl
      +
      +feature_importances(mach)  ## `:impurity` feature importances
      +forest.feature_importance = :split
+feature_importances(mach)  ## `:split` feature importances
      +

      See also DecisionTree.jl and the unwrapped model type MLJDecisionTreeInterface.DecisionTree.RandomForestClassifier.

      diff --git a/v0.20.3/models/RandomForestClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/RandomForestClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..25ab9f3b2 --- /dev/null +++ b/v0.20.3/models/RandomForestClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +RandomForestClassifier · MLJ

      RandomForestClassifier

      RandomForestClassifier

      A model type for constructing a random forest classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RandomForestClassifier = @load RandomForestClassifier pkg=MLJScikitLearnInterface

      Do model = RandomForestClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomForestClassifier(n_estimators=...).

      A random forest is a meta estimator that fits a number of classifying decision trees on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting. The sub-sample size is controlled with the max_samples parameter if bootstrap=True (default), otherwise the whole dataset is used to build each tree.
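
Example

A minimal usage sketch, following the generic MLJ workflow; the hyper-parameter value and the choice of the iris data are for illustration only:

using MLJ
+RandomForestClassifier = @load RandomForestClassifier pkg=MLJScikitLearnInterface
+model = RandomForestClassifier(n_estimators=100)   ## n_estimators chosen for illustration only
+X, y = @load_iris                                  ## table, vector
+mach = machine(model, X, y) |> fit!
+yhat = predict(mach, X)                            ## predictions on the training data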

      diff --git a/v0.20.3/models/RandomForestImputer_BetaML/index.html b/v0.20.3/models/RandomForestImputer_BetaML/index.html new file mode 100644 index 000000000..cf9ecff4a --- /dev/null +++ b/v0.20.3/models/RandomForestImputer_BetaML/index.html @@ -0,0 +1,36 @@ + +RandomForestImputer · MLJ

      RandomForestImputer

      mutable struct RandomForestImputer <: MLJModelInterface.Unsupervised

      Impute missing values using Random Forests, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

      • n_trees::Int64: Number of (decision) trees in the forest [def: 30]
      • max_depth::Union{Nothing, Int64}: The maximum depth the tree is allowed to reach. When this is reached the node is forced to become a leaf [def: nothing, i.e. no limits]
      • min_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]
• min_records::Int64: The minimum number of records a node must hold to be considered for partitioning [def: 2]
      • max_features::Union{Nothing, Int64}: The maximum number of (random) features to consider at each partitioning [def: nothing, i.e. square root of the data dimension]
      • forced_categorical_cols::Vector{Int64}: Specify the positions of the integer columns to treat as categorical instead of cardinal. [Default: empty vector (all numerical cols are treated as cardinal by default and the others as categorical)]
• splitting_criterion::Union{Nothing, Function}: Either gini, entropy or variance. This is the name of the function to be used to compute the information gain of a specific partition. This is done by measuring the difference between the "impurity" of the labels of the parent node and those of the two child nodes, weighted by the respective number of items. [def: nothing, i.e. gini for categorical labels (classification task) and variance for numerical labels (regression task)]. It can be an anonymous function.
• recursive_passages::Int64: Defines the number of times to go through the various columns to impute their data. Useful when there are data to impute on multiple columns. The order of the first passage is given by the decreasing number of missing values per column; the other passages are random [default: 1].
• rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X = [1 10.5;1.5 missing; 1.8 8; 1.7 15; 3.2 40; missing missing; 3.3 38; missing -2.3; 5.2 -2.4] |> table ;
      +
      +julia> modelType   = @load RandomForestImputer  pkg = "BetaML" verbosity=0
      +BetaML.Imputation.RandomForestImputer
      +
      +julia> model     = modelType(n_trees=40)
      +RandomForestImputer(
      +  n_trees = 40, 
      +  max_depth = nothing, 
      +  min_gain = 0.0, 
      +  min_records = 2, 
      +  max_features = nothing, 
      +  forced_categorical_cols = Int64[], 
      +  splitting_criterion = nothing, 
      +  recursive_passages = 1, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach      = machine(model, X);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(RandomForestImputer(n_trees = 40, …), …).
      +
      +julia> X_full       = transform(mach) |> MLJ.matrix
      +9×2 Matrix{Float64}:
      + 1.0      10.5
      + 1.5      10.3909
      + 1.8       8.0
      + 1.7      15.0
      + 3.2      40.0
      + 2.88375   8.66125
      + 3.3      38.0
      + 3.98125  -2.3
      + 5.2      -2.4
      diff --git a/v0.20.3/models/RandomForestRegressor_BetaML/index.html b/v0.20.3/models/RandomForestRegressor_BetaML/index.html new file mode 100644 index 000000000..05fd4e0bd --- /dev/null +++ b/v0.20.3/models/RandomForestRegressor_BetaML/index.html @@ -0,0 +1,36 @@ + +RandomForestRegressor · MLJ

      RandomForestRegressor

      mutable struct RandomForestRegressor <: MLJModelInterface.Deterministic

      A simple Random Forest model for regression with support for Missing data, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

      • n_trees::Int64: Number of (decision) trees in the forest [def: 30]
      • max_depth::Int64: The maximum depth the tree is allowed to reach. When this is reached the node is forced to become a leaf [def: 0, i.e. no limits]
      • min_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]
• min_records::Int64: The minimum number of records a node must hold to be considered for partitioning [def: 2]
• max_features::Int64: The maximum number of (random) features to consider at each partitioning [def: 0, i.e. square root of the data dimension]
• splitting_criterion::Function: This is the name of the function to be used to compute the information gain of a specific partition. This is done by measuring the difference between the "impurity" of the labels of the parent node and those of the two child nodes, weighted by the respective number of items. [def: variance]. Either variance or a custom function. It can also be an anonymous function.
• β::Float64: Parameter that regulates the weights of the scoring of each tree, to be (optionally) used in prediction based on the error of the individual trees computed on the records on which trees have not been trained. Higher values favour "better" trees, but too high values will cause overfitting [def: 0, i.e. uniform weights]
• rng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]

      Example:

      julia> using MLJ
      +
      +julia> X, y        = @load_boston;
      +
      +julia> modelType   = @load RandomForestRegressor pkg = "BetaML" verbosity=0
      +BetaML.Trees.RandomForestRegressor
      +
      +julia> model       = modelType()
      +RandomForestRegressor(
      +  n_trees = 30, 
      +  max_depth = 0, 
      +  min_gain = 0.0, 
      +  min_records = 2, 
      +  max_features = 0, 
      +  splitting_criterion = BetaML.Utils.variance, 
      +  β = 0.0, 
      +  rng = Random._GLOBAL_RNG())
      +
      +julia> mach        = machine(model, X, y);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(RandomForestRegressor(n_trees = 30, …), …).
      +
      +julia> ŷ           = predict(mach, X);
      +
      +julia> hcat(y,ŷ)
      +506×2 Matrix{Float64}:
      + 24.0  25.8433
      + 21.6  22.4317
      + 34.7  35.5742
      + 33.4  33.9233
      +  ⋮    
      + 23.9  24.42
      + 22.0  22.4433
      + 11.9  15.5833
      diff --git a/v0.20.3/models/RandomForestRegressor_DecisionTree/index.html b/v0.20.3/models/RandomForestRegressor_DecisionTree/index.html new file mode 100644 index 000000000..23598995a --- /dev/null +++ b/v0.20.3/models/RandomForestRegressor_DecisionTree/index.html @@ -0,0 +1,13 @@ + +RandomForestRegressor · MLJ

      RandomForestRegressor

      RandomForestRegressor

      A model type for constructing a CART random forest regressor, based on DecisionTree.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RandomForestRegressor = @load RandomForestRegressor pkg=DecisionTree

      Do model = RandomForestRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomForestRegressor(max_depth=...).

RandomForestRegressor implements the standard Random Forest algorithm, originally published in Breiman, L. (2001): "Random Forests.", Machine Learning, vol. 45, pp. 5–32.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyperparameters

      • max_depth=-1: max depth of the decision tree (-1=any)
      • min_samples_leaf=1: min number of samples each leaf needs to have
      • min_samples_split=2: min number of samples needed for a split
      • min_purity_increase=0: min purity needed for a split
      • n_subfeatures=-1: number of features to select at random (0 for all, -1 for square root of number of features)
      • n_trees=10: number of trees to train
• sampling_fraction=0.7: fraction of samples to train each tree on
      • feature_importance: method to use for computing feature importances. One of (:impurity, :split)
      • rng=Random.GLOBAL_RNG: random number generator or seed

      Operations

      • predict(mach, Xnew): return predictions of the target given new features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • forest: the Ensemble object returned by the core DecisionTree.jl algorithm

      Report

      • features: the names of the features encountered in training

      Accessor functions

      • feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)

      Examples

      using MLJ
      +Forest = @load RandomForestRegressor pkg=DecisionTree
      +forest = Forest(max_depth=4, min_samples_split=3)
      +
      +X, y = make_regression(100, 2) ## synthetic data
      +mach = machine(forest, X, y) |> fit!
      +
      +Xnew, _ = make_regression(3, 2)
      +yhat = predict(mach, Xnew) ## new predictions
      +
      +fitted_params(mach).forest ## raw `Ensemble` object from DecisionTree.jl
      +feature_importances(mach)

      See also DecisionTree.jl and the unwrapped model type MLJDecisionTreeInterface.DecisionTree.RandomForestRegressor.

      diff --git a/v0.20.3/models/RandomForestRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/RandomForestRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..f07dffa7a --- /dev/null +++ b/v0.20.3/models/RandomForestRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +RandomForestRegressor · MLJ

      RandomForestRegressor

      RandomForestRegressor

      A model type for constructing a random forest regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RandomForestRegressor = @load RandomForestRegressor pkg=MLJScikitLearnInterface

      Do model = RandomForestRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomForestRegressor(n_estimators=...).

A random forest is a meta estimator that fits a number of decision tree regressors on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting. The sub-sample size is controlled with the max_samples parameter if bootstrap=True (default), otherwise the whole dataset is used to build each tree.
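
Example

A minimal usage sketch, following the generic MLJ workflow; the hyper-parameter value and the synthetic data are for illustration only:

using MLJ
+RandomForestRegressor = @load RandomForestRegressor pkg=MLJScikitLearnInterface
+model = RandomForestRegressor(n_estimators=100)   ## n_estimators chosen for illustration only
+X, y = make_regression(100, 4)                    ## synthetic table and target
+mach = machine(model, X, y) |> fit!
+yhat = predict(mach, X)                           ## predictions on the training data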

      diff --git a/v0.20.3/models/RandomOversampler_Imbalance/index.html b/v0.20.3/models/RandomOversampler_Imbalance/index.html new file mode 100644 index 000000000..ea30d51e1 --- /dev/null +++ b/v0.20.3/models/RandomOversampler_Imbalance/index.html @@ -0,0 +1,30 @@ + +RandomOversampler · MLJ

      RandomOversampler

Initialize a random oversampling model with the given hyper-parameters.

      RandomOversampler

      A model type for constructing a random oversampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RandomOversampler = @load RandomOversampler pkg=Imbalance

      Do model = RandomOversampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomOversampler(ratios=...).

      RandomOversampler implements naive oversampling by repeating existing observations with replacement.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

For default values of the hyper-parameters, model can be constructed by model = RandomOversampler()

      Hyperparameters

      • ratios=1.0: A parameter that controls the amount of oversampling to be done for each class

        • Can be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class
        • Can be a dictionary mapping each class label to the float ratio for that class
• rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.

      Transform Inputs

      • X: A matrix of real numbers or a table with element scitypes that subtype Union{Finite, Infinite}. Elements in nominal columns should subtype Finite (i.e., have scitype OrderedFactor or Multiclass) and elements in continuous columns should subtype Infinite (i.e., have scitype Count or Continuous).
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

• Xover: A matrix or table that includes the original data and the new observations due to oversampling, depending on whether the input X is a matrix or table, respectively
      • yover: An abstract vector of labels corresponding to Xover

      Operations

      • transform(mach, X, y): resample the data X and y using RandomOversampler, returning both the new and original observations

      Example

      using MLJ
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows, num_continuous_feats = 100, 5
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                class_probs, rng=42)    
      +
      +julia> Imbalance.checkbalance(y)
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      +
      +## load RandomOversampler
      +RandomOversampler = @load RandomOversampler pkg=Imbalance
      +
      +## wrap the model in a machine
      +oversampler = RandomOversampler(ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)
      +mach = machine(oversampler)
      +
      +## provide the data to transform (there is nothing to fit)
      +Xover, yover = transform(mach, X, y)
      +
      +julia> Imbalance.checkbalance(yover)
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      diff --git a/v0.20.3/models/RandomUndersampler_Imbalance/index.html b/v0.20.3/models/RandomUndersampler_Imbalance/index.html new file mode 100644 index 000000000..274378681 --- /dev/null +++ b/v0.20.3/models/RandomUndersampler_Imbalance/index.html @@ -0,0 +1,31 @@ + +RandomUndersampler · MLJ

      RandomUndersampler

Initialize a random undersampling model with the given hyper-parameters.

      RandomUndersampler

      A model type for constructing a random undersampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RandomUndersampler = @load RandomUndersampler pkg=Imbalance

      Do model = RandomUndersampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomUndersampler(ratios=...).

      RandomUndersampler implements naive undersampling by randomly removing existing observations.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed by model = RandomUndersampler()

      Hyperparameters

      • ratios=1.0: A parameter that controls the amount of undersampling to be done for each class

        • Can be a float and in this case each class will be undersampled to the size of the minority class times the float. By default, all classes are undersampled to the size of the minority class
        • Can be a dictionary mapping each class label to the float ratio for that class
• rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.

      Transform Inputs

      • X: A matrix of real numbers or a table with element scitypes that subtype Union{Finite, Infinite}. Elements in nominal columns should subtype Finite (i.e., have scitype OrderedFactor or Multiclass) and elements in continuous columns should subtype Infinite (i.e., have scitype Count or Continuous).
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

• X_under: A matrix or table that includes the data after undersampling, depending on whether the input X is a matrix or table, respectively
      • y_under: An abstract vector of labels corresponding to X_under

      Operations

      • transform(mach, X, y): resample the data X and y using RandomUndersampler, returning both the new and original observations

      Example

      using MLJ
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows, num_continuous_feats = 100, 5
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                class_probs, rng=42)   
      +
      +julia> Imbalance.checkbalance(y; ref="minority")
      + 1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      + 2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (173.7%) 
      + 0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (252.6%) 
      +
      +## load RandomUndersampler
      +RandomUndersampler = @load RandomUndersampler pkg=Imbalance
      +
      +## wrap the model in a machine
      +undersampler = RandomUndersampler(ratios=Dict(0=>1.0, 1=> 1.0, 2=>1.0), 
      +               rng=42)
      +mach = machine(undersampler)
      +
      +## provide the data to transform (there is nothing to fit)
      +X_under, y_under = transform(mach, X, y)
      +                                      
      +julia> Imbalance.checkbalance(y_under; ref="minority")
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      diff --git a/v0.20.3/models/RandomWalkOversampler_Imbalance/index.html b/v0.20.3/models/RandomWalkOversampler_Imbalance/index.html new file mode 100644 index 000000000..b450e547e --- /dev/null +++ b/v0.20.3/models/RandomWalkOversampler_Imbalance/index.html @@ -0,0 +1,39 @@ + +RandomWalkOversampler · MLJ

      RandomWalkOversampler

Initialize a RandomWalkOversampler model with the given hyper-parameters.

      RandomWalkOversampler

      A model type for constructing a random walk oversampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RandomWalkOversampler = @load RandomWalkOversampler pkg=Imbalance

      Do model = RandomWalkOversampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomWalkOversampler(ratios=...).

      RandomWalkOversampler implements the random walk oversampling algorithm to correct for class imbalance as in Zhang, H., & Li, M. (2014). RWO-Sampling: A random walk over-sampling approach to imbalanced data classification. Information Fusion, 25, 4-20.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by

      mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed by

      model = RandomWalkOversampler()

      Hyperparameters

      • ratios=1.0: A parameter that controls the amount of oversampling to be done for each class

        • Can be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class
        • Can be a dictionary mapping each class label to the float ratio for that class
      • rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.

      Transform Inputs

      • X: A table with element scitypes that subtype Union{Finite, Infinite}. Elements in nominal columns should subtype Finite (i.e., have scitype OrderedFactor or Multiclass) and elements in continuous columns should subtype Infinite (i.e., have scitype Count or Continuous).
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

      • Xover: A matrix or table that includes the original data and the new observations due to oversampling, depending on whether the input X is a matrix or table, respectively
      • yover: An abstract vector of labels corresponding to Xover

      Operations

      • transform(mach, X, y): resample the data X and y using RandomWalkOversampler, returning both the new and original observations

      Example

      using MLJ
      +using ScientificTypes
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows = 100
      +num_continuous_feats = 3
      +## want two categorical features with three and two possible values respectively
      +num_vals_per_category = [3, 2]
      +
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                          class_probs, num_vals_per_category, rng=42)                      
      +julia> Imbalance.checkbalance(y)
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      +
      +
      +julia> ScientificTypes.schema(X).scitypes
      +(Continuous, Continuous, Continuous, Continuous, Continuous)
      +## coerce nominal columns to a finite scitype (multiclass or ordered factor)
      +X = coerce(X, :Column4=>Multiclass, :Column5=>Multiclass)
      +
      +## load RandomWalkOversampler model type:
      +RandomWalkOversampler = @load RandomWalkOversampler pkg=Imbalance
      +
      +## oversample the minority classes to  sizes relative to the majority class:
      +oversampler = RandomWalkOversampler(ratios = Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng = 42)
      +mach = machine(oversampler)
      +Xover, yover = transform(mach, X, y)
      +
      +julia> Imbalance.checkbalance(yover)
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%)
      diff --git a/v0.20.3/models/RidgeCVClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/RidgeCVClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..6df8104b7 --- /dev/null +++ b/v0.20.3/models/RidgeCVClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +RidgeCVClassifier · MLJ

      RidgeCVClassifier

      RidgeCVClassifier

      A model type for constructing a ridge regression classifier with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RidgeCVClassifier = @load RidgeCVClassifier pkg=MLJScikitLearnInterface

      Do model = RidgeCVClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RidgeCVClassifier(alphas=...).

      Hyper-parameters

      • alphas = [0.1, 1.0, 10.0]
      • fit_intercept = true
      • scoring = nothing
      • cv = 5
      • class_weight = nothing
      • store_cv_values = false
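
      Example

      The docstring above lists only hyper-parameter defaults, so here is a minimal, hypothetical usage sketch (not part of the original docstring). It assumes the scikit-learn backend required by MLJScikitLearnInterface.jl is installed, and the hyper-parameter values are purely illustrative:

      using MLJ

      ## load the model type and a built-in dataset
      RidgeCVClassifier = @load RidgeCVClassifier pkg=MLJScikitLearnInterface
      X, y = @load_iris

      ## search a custom grid of regularization strengths via the built-in CV
      model = RidgeCVClassifier(alphas=[0.01, 0.1, 1.0, 10.0], cv=5)
      mach = machine(model, X, y) |> fit!

      ## point predictions of the class labels
      yhat = predict(mach, X)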
      diff --git a/v0.20.3/models/RidgeCVRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/RidgeCVRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..6ec50d7a2 --- /dev/null +++ b/v0.20.3/models/RidgeCVRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +RidgeCVRegressor · MLJ

      RidgeCVRegressor

      RidgeCVRegressor

      A model type for constructing a ridge regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RidgeCVRegressor = @load RidgeCVRegressor pkg=MLJScikitLearnInterface

      Do model = RidgeCVRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RidgeCVRegressor(alphas=...).

      Hyper-parameters

      • alphas = (0.1, 1.0, 10.0)
      • fit_intercept = true
      • scoring = nothing
      • cv = 5
      • gcv_mode = nothing
      • store_cv_values = false
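
      Example

      As with the classifier above, the following is a minimal, hypothetical sketch (not from the original docstring), assuming the scikit-learn backend is installed; make_regression is the synthetic-data generator shipped with MLJ:

      using MLJ

      RidgeCVRegressor = @load RidgeCVRegressor pkg=MLJScikitLearnInterface
      X, y = make_regression(100, 5)

      ## cross-validate over a custom grid of regularization strengths
      model = RidgeCVRegressor(alphas=(0.01, 0.1, 1.0, 10.0))
      mach = machine(model, X, y) |> fit!
      yhat = predict(mach, X)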
      diff --git a/v0.20.3/models/RidgeClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/RidgeClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..75319dd51 --- /dev/null +++ b/v0.20.3/models/RidgeClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +RidgeClassifier · MLJ

      RidgeClassifier

      RidgeClassifier

      A model type for constructing a ridge regression classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RidgeClassifier = @load RidgeClassifier pkg=MLJScikitLearnInterface

      Do model = RidgeClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RidgeClassifier(alpha=...).

      Hyper-parameters

      • alpha = 1.0
      • fit_intercept = true
      • copy_X = true
      • max_iter = nothing
      • tol = 0.001
      • class_weight = nothing
      • solver = auto
      • random_state = nothing
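
      Example

      A brief, hypothetical usage sketch (the docstring itself ships no example), assuming the scikit-learn backend is installed; the alpha value is purely illustrative:

      using MLJ

      RidgeClassifier = @load RidgeClassifier pkg=MLJScikitLearnInterface
      X, y = @load_iris

      ## stronger regularization than the default alpha = 1.0
      model = RidgeClassifier(alpha=10.0)
      mach = machine(model, X, y) |> fit!
      yhat = predict(mach, X)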
      diff --git a/v0.20.3/models/RidgeRegressor_MLJLinearModels/index.html b/v0.20.3/models/RidgeRegressor_MLJLinearModels/index.html new file mode 100644 index 000000000..593b7bb41 --- /dev/null +++ b/v0.20.3/models/RidgeRegressor_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +RidgeRegressor · MLJ

      RidgeRegressor

      RidgeRegressor

      A model type for constructing a ridge regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RidgeRegressor = @load RidgeRegressor pkg=MLJLinearModels

      Do model = RidgeRegressor() to construct an instance with default hyper-parameters.

      Ridge regression is a linear model with objective function

      $|Xθ - y|₂²/2 + n⋅λ|θ|₂²/2$

      where $n$ is the number of observations.

      If scale_penalty_with_samples = false then the objective function is instead

      $|Xθ - y|₂²/2 + λ|θ|₂²/2$.

      Different solver options exist, as indicated under "Hyperparameters" below.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      • lambda::Real: strength of the L2 regularization. Default: 1.0
      • fit_intercept::Bool: whether to fit the intercept or not. Default: true
      • penalize_intercept::Bool: whether to penalize the intercept. Default: false
      • scale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true
      • solver::Union{Nothing, MLJLinearModels.Solver}: any instance of MLJLinearModels.Analytical. Use Analytical() for Cholesky and CG()=Analytical(iterative=true) for conjugate-gradient. If solver = nothing (default) then Analytical() is used. Default: nothing

      Example

      using MLJ
      +X, y = make_regression()
      +mach = fit!(machine(RidgeRegressor(), X, y))
      +predict(mach, X)
      +fitted_params(mach)
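
      The example above relies on the default analytical (Cholesky) solver. As a sketch only (not from the original docstring), the conjugate-gradient variant mentioned under "Hyperparameters" could be requested as follows; the lambda value is illustrative:

      using MLJ
      import MLJLinearModels

      RidgeRegressor = @load RidgeRegressor pkg=MLJLinearModels
      X, y = make_regression()

      ## conjugate-gradient version of the analytical solver
      model = RidgeRegressor(lambda=0.5, solver=MLJLinearModels.Analytical(iterative=true))
      mach = fit!(machine(model, X, y))
      predict(mach, X)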

      See also ElasticNetRegressor.

      diff --git a/v0.20.3/models/RidgeRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/RidgeRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..538b19f73 --- /dev/null +++ b/v0.20.3/models/RidgeRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +RidgeRegressor · MLJ

      RidgeRegressor

      RidgeRegressor

      A model type for constructing a ridge regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RidgeRegressor = @load RidgeRegressor pkg=MLJScikitLearnInterface

      Do model = RidgeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RidgeRegressor(alpha=...).

      Hyper-parameters

      • alpha = 1.0
      • fit_intercept = true
      • copy_X = true
      • max_iter = 1000
      • tol = 0.0001
      • solver = auto
      • random_state = nothing
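
      Example

      A minimal, hypothetical sketch (the docstring provides no example), assuming the scikit-learn backend is installed; the alpha value is illustrative:

      using MLJ

      RidgeRegressor = @load RidgeRegressor pkg=MLJScikitLearnInterface
      X, y = make_regression(100, 5)

      model = RidgeRegressor(alpha=0.1)
      mach = machine(model, X, y) |> fit!
      yhat = predict(mach, X)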
      diff --git a/v0.20.3/models/RidgeRegressor_MultivariateStats/index.html b/v0.20.3/models/RidgeRegressor_MultivariateStats/index.html new file mode 100644 index 000000000..e92d4d311 --- /dev/null +++ b/v0.20.3/models/RidgeRegressor_MultivariateStats/index.html @@ -0,0 +1,11 @@ + +RidgeRegressor · MLJ

      RidgeRegressor

      RidgeRegressor

      A model type for constructing a ridge regressor, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RidgeRegressor = @load RidgeRegressor pkg=MultivariateStats

      Do model = RidgeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RidgeRegressor(lambda=...).

      RidgeRegressor adds a quadratic penalty term to least squares regression, for regularization. Ridge regression is particularly useful in the case of multicollinearity. Options exist to specify a bias term, and to adjust the strength of the penalty term.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • lambda=1.0: The non-negative parameter for the regularization strength. If lambda is 0, ridge regression is equivalent to linear least squares regression, and as lambda approaches infinity, all the linear coefficients approach 0.
      • bias=true: Include the bias term if true, otherwise fit without bias term.

      Operations

      • predict(mach, Xnew): Return predictions of the target given new features Xnew, which should have the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • coefficients: The linear coefficients determined by the model.
      • intercept: The intercept determined by the model.

      Examples

      using MLJ
      +
      +RidgeRegressor = @load RidgeRegressor pkg=MultivariateStats
      +pipe = Standardizer() |> RidgeRegressor(lambda=10)
      +
      +X, y = @load_boston
      +
      +mach = machine(pipe, X, y) |> fit!
      +yhat = predict(mach, X)
      +training_error = l1(yhat, y) |> mean

      See also LinearRegressor, MultitargetLinearRegressor, MultitargetRidgeRegressor

      diff --git a/v0.20.3/models/RobustRegressor_MLJLinearModels/index.html b/v0.20.3/models/RobustRegressor_MLJLinearModels/index.html new file mode 100644 index 000000000..272998eb8 --- /dev/null +++ b/v0.20.3/models/RobustRegressor_MLJLinearModels/index.html @@ -0,0 +1,6 @@ + +RobustRegressor · MLJ

      RobustRegressor

      RobustRegressor

      A model type for constructing a robust regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      RobustRegressor = @load RobustRegressor pkg=MLJLinearModels

      Do model = RobustRegressor() to construct an instance with default hyper-parameters.

      Robust regression is a linear model with objective function

      $∑ρ(Xθ - y) + n⋅λ|θ|₂² + n⋅γ|θ|₁$

      where $ρ$ is a robust loss function (e.g. the Huber function) and $n$ is the number of observations.

      If scale_penalty_with_samples = false the objective function is instead

      $∑ρ(Xθ - y) + λ|θ|₂² + γ|θ|₁$.

      Different solver options exist, as indicated under "Hyperparameters" below.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where:

      • X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)

      Train the machine using fit!(mach, rows=...).

      Hyperparameters

      • rho::MLJLinearModels.RobustRho: the type of robust loss, which can be any instance of MLJLinearModels.L where L is one of: AndrewsRho, BisquareRho, FairRho, HuberRho, LogisticRho, QuantileRho, TalwarRho. Default: HuberRho(0.1)

      • lambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. Default: 1.0

      • gamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0

      • penalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2

      • fit_intercept::Bool: whether to fit the intercept or not. Default: true

      • penalize_intercept::Bool: whether to penalize the intercept. Default: false

      • scale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true

      • solver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, IWLSCG, Newton, NewtonCG, if penalty = :l2, and ProxGrad otherwise.

        If solver = nothing (default) then LBFGS() is used, if penalty = :l2, and otherwise ProxGrad(accel=true) (FISTA) is used.

        Solver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) Default: nothing

      Example

      using MLJ
      +X, y = make_regression()
      +mach = fit!(machine(RobustRegressor(), X, y))
      +predict(mach, X)
      +fitted_params(mach)
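
      The example above uses the default Huber loss and L2 penalty. As a sketch only (not from the original docstring), a different Huber threshold and an L1 penalty could be requested like this; the particular values are illustrative:

      using MLJ
      import MLJLinearModels

      RobustRegressor = @load RobustRegressor pkg=MLJLinearModels
      X, y = make_regression()

      ## Huber loss with a wider threshold and an L1 penalty (solved with FISTA by default)
      model = RobustRegressor(rho=MLJLinearModels.HuberRho(0.5), penalty=:l1, lambda=0.5)
      mach = fit!(machine(model, X, y))
      predict(mach, X)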

      See also HuberRegressor, QuantileRegressor.

      diff --git a/v0.20.3/models/SGDClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/SGDClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..7d24fe54e --- /dev/null +++ b/v0.20.3/models/SGDClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +SGDClassifier · MLJ

      SGDClassifier

      SGDClassifier

      A model type for constructing a stochastic gradient descent (SGD) classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SGDClassifier = @load SGDClassifier pkg=MLJScikitLearnInterface

      Do model = SGDClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SGDClassifier(loss=...).

      Hyper-parameters

      • loss = hinge
      • penalty = l2
      • alpha = 0.0001
      • l1_ratio = 0.15
      • fit_intercept = true
      • max_iter = 1000
      • tol = 0.001
      • shuffle = true
      • verbose = 0
      • epsilon = 0.1
      • n_jobs = nothing
      • random_state = nothing
      • learning_rate = optimal
      • eta0 = 0.0
      • power_t = 0.5
      • early_stopping = false
      • validation_fraction = 0.1
      • n_iter_no_change = 5
      • class_weight = nothing
      • warm_start = false
      • average = false
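
      Example

      A minimal, hypothetical sketch (no example ships with this docstring), assuming the scikit-learn backend is installed; note that this model predicts point labels rather than probabilities, and the hyper-parameter values are illustrative:

      using MLJ

      SGDClassifier = @load SGDClassifier pkg=MLJScikitLearnInterface
      X, y = @load_iris

      ## stronger regularization and a larger iteration budget than the defaults
      model = SGDClassifier(alpha=1e-3, max_iter=2000)
      mach = machine(model, X, y) |> fit!
      yhat = predict(mach, X)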
      diff --git a/v0.20.3/models/SGDRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/SGDRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..29d04b745 --- /dev/null +++ b/v0.20.3/models/SGDRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +SGDRegressor · MLJ

      SGDRegressor

      SGDRegressor

      A model type for constructing a stochastic gradient descent-based regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SGDRegressor = @load SGDRegressor pkg=MLJScikitLearnInterface

      Do model = SGDRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SGDRegressor(loss=...).

      Hyper-parameters

      • loss = squared_error
      • penalty = l2
      • alpha = 0.0001
      • l1_ratio = 0.15
      • fit_intercept = true
      • max_iter = 1000
      • tol = 0.001
      • shuffle = true
      • verbose = 0
      • epsilon = 0.1
      • random_state = nothing
      • learning_rate = invscaling
      • eta0 = 0.01
      • power_t = 0.25
      • early_stopping = false
      • validation_fraction = 0.1
      • n_iter_no_change = 5
      • warm_start = false
      • average = false
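
      Example

      A similar hypothetical sketch for the regressor (not from the original docstring), again assuming the scikit-learn backend and with illustrative hyper-parameter values:

      using MLJ

      SGDRegressor = @load SGDRegressor pkg=MLJScikitLearnInterface
      X, y = make_regression(200, 10)

      ## enable early stopping on a held-out validation fraction
      model = SGDRegressor(alpha=1e-4, max_iter=2000, early_stopping=true)
      mach = machine(model, X, y) |> fit!
      yhat = predict(mach, X)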
      diff --git a/v0.20.3/models/SMOTENC_Imbalance/index.html b/v0.20.3/models/SMOTENC_Imbalance/index.html new file mode 100644 index 000000000..f0c209db3 --- /dev/null +++ b/v0.20.3/models/SMOTENC_Imbalance/index.html @@ -0,0 +1,39 @@ + +SMOTENC · MLJ

      SMOTENC

      Initialize a SMOTENC model with the given hyper-parameters.

      SMOTENC

      A model type for constructing a SMOTENC sampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SMOTENC = @load SMOTENC pkg=Imbalance

      Do model = SMOTENC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SMOTENC(k=...).

      SMOTENC implements the SMOTENC algorithm to correct for class imbalance as in N. V. Chawla, K. W. Bowyer, L. O. Hall, W. P. Kegelmeyer, “SMOTE: synthetic minority over-sampling technique,” Journal of artificial intelligence research, 321-357, 2002.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by

      mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed by

      model = SMOTENC()

      Hyperparameters

      • k=5: Number of nearest neighbors to consider in the SMOTENC algorithm. Should be within the range [1, n - 1], where n is the number of observations; otherwise set to the nearest of these two values.

      • ratios=1.0: A parameter that controls the amount of oversampling to be done for each class

        • Can be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class
        • Can be a dictionary mapping each class label to the float ratio for that class
      • knn_tree: Decides the tree used in KNN computations. Either "Brute" or "Ball". BallTree can be much faster but may lead to inaccurate results.

      • rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.

      Transform Inputs

      • X: A table with element scitypes that subtype Union{Finite, Infinite}. Elements in nominal columns should subtype Finite (i.e., have scitype OrderedFactor or Multiclass) and elements in continuous columns should subtype Infinite (i.e., have scitype Count or Continuous).
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

      • Xover: A matrix or table that includes the original data and the new observations due to oversampling, depending on whether the input X is a matrix or table, respectively
      • yover: An abstract vector of labels corresponding to Xover

      Operations

      • transform(mach, X, y): resample the data X and y using SMOTENC, returning both the new and original observations

      Example

      using MLJ
      +using ScientificTypes
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows = 100
      +num_continuous_feats = 3
      +## want two categorical features with three and two possible values respectively
      +num_vals_per_category = [3, 2]
      +
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                class_probs, num_vals_per_category, rng=42)                      
      +julia> Imbalance.checkbalance(y)
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      +
      +julia> ScientificTypes.schema(X).scitypes
      +(Continuous, Continuous, Continuous, Continuous, Continuous)
      +## coerce nominal columns to a finite scitype (multiclass or ordered factor)
      +X = coerce(X, :Column4=>Multiclass, :Column5=>Multiclass)
      +
      +## load SMOTE-NC
      +SMOTENC = @load SMOTENC pkg=Imbalance
      +
      +## wrap the model in a machine
      +oversampler = SMOTENC(k=5, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)
      +mach = machine(oversampler)
      +
      +## provide the data to transform (there is nothing to fit)
      +Xover, yover = transform(mach, X, y)
      +
      +julia> Imbalance.checkbalance(yover)
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      diff --git a/v0.20.3/models/SMOTEN_Imbalance/index.html b/v0.20.3/models/SMOTEN_Imbalance/index.html new file mode 100644 index 000000000..de75814db --- /dev/null +++ b/v0.20.3/models/SMOTEN_Imbalance/index.html @@ -0,0 +1,40 @@ + +SMOTEN · MLJ

      SMOTEN

      Initialize a SMOTEN model with the given hyper-parameters.

      SMOTEN

      A model type for constructing a SMOTEN sampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SMOTEN = @load SMOTEN pkg=Imbalance

      Do model = SMOTEN() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SMOTEN(k=...).

      SMOTEN implements the SMOTEN algorithm to correct for class imbalance as in N. V. Chawla, K. W. Bowyer, L. O. Hall, W. P. Kegelmeyer, “SMOTE: synthetic minority over-sampling technique,” Journal of artificial intelligence research, 321-357, 2002.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by

      mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed by

      model = SMOTEN()

      Hyperparameters

      • k=5: Number of nearest neighbors to consider in the SMOTEN algorithm. Should be within the range [1, n - 1], where n is the number of observations; otherwise set to the nearest of these two values.

      • ratios=1.0: A parameter that controls the amount of oversampling to be done for each class

        • Can be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class
        • Can be a dictionary mapping each class label to the float ratio for that class
      • rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.

      Transform Inputs

      • X: A matrix of integers or a table with element scitypes that subtype Finite. That is, for table inputs each column should have either OrderedFactor or Multiclass as the element scitype.
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

      • Xover: A matrix or table that includes the original data and the new observations due to oversampling, depending on whether the input X is a matrix or table, respectively
      • yover: An abstract vector of labels corresponding to Xover

      Operations

      • transform(mach, X, y): resample the data X and y using SMOTEN, returning both the new and original observations

      Example

      using MLJ
      +using ScientificTypes
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows = 100
      +num_continuous_feats = 0
      +## want two categorical features with three and two possible values respectively
      +num_vals_per_category = [3, 2]
      +
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                class_probs, num_vals_per_category, rng=42)                      
      +julia> Imbalance.checkbalance(y)
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      +
      +julia> ScientificTypes.schema(X).scitypes
      +(Count, Count)
      +
      +## coerce to a finite scitype (multiclass or ordered factor)
      +X = coerce(X, autotype(X, :few_to_finite))
      +
      +## load SMOTEN
      +SMOTEN = @load SMOTEN pkg=Imbalance
      +
      +## wrap the model in a machine
      +oversampler = SMOTEN(k=5, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)
      +mach = machine(oversampler)
      +
      +## provide the data to transform (there is nothing to fit)
      +Xover, yover = transform(mach, X, y)
      +
      +julia> Imbalance.checkbalance(yover)
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      diff --git a/v0.20.3/models/SMOTE_Imbalance/index.html b/v0.20.3/models/SMOTE_Imbalance/index.html new file mode 100644 index 000000000..66d0614c6 --- /dev/null +++ b/v0.20.3/models/SMOTE_Imbalance/index.html @@ -0,0 +1,31 @@ + +SMOTE · MLJ

      SMOTE

      Initialize a SMOTE model with the given hyper-parameters.

      SMOTE

      A model type for constructing a SMOTE sampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SMOTE = @load SMOTE pkg=Imbalance

      Do model = SMOTE() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SMOTE(k=...).

      SMOTE implements the SMOTE algorithm to correct for class imbalance as in N. V. Chawla, K. W. Bowyer, L. O. Hall, W. P. Kegelmeyer, “SMOTE: synthetic minority over-sampling technique,” Journal of artificial intelligence research, 321-357, 2002.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by

      mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed by

      model = SMOTE()

      Hyperparameters

      • k=5: Number of nearest neighbors to consider in the SMOTE algorithm. Should be within the range [1, n - 1], where n is the number of observations; otherwise set to the nearest of these two values.

      • ratios=1.0: A parameter that controls the amount of oversampling to be done for each class

        • Can be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class
        • Can be a dictionary mapping each class label to the float ratio for that class
      • rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.

      Transform Inputs

      • X: A matrix or table of floats where each row is an observation from the dataset
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

      • Xover: A matrix or table that includes the original data and the new observations due to oversampling, depending on whether the input X is a matrix or table, respectively
      • yover: An abstract vector of labels corresponding to Xover

      Operations

      • transform(mach, X, y): resample the data X and y using SMOTE, returning both the new and original observations

      Example

      using MLJ
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows, num_continuous_feats = 100, 5
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                class_probs, rng=42)    
      +
      +julia> Imbalance.checkbalance(y)
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      +
      +## load SMOTE
      +SMOTE = @load SMOTE pkg=Imbalance
      +
      +## wrap the model in a machine
      +oversampler = SMOTE(k=5, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)
      +mach = machine(oversampler)
      +
      +## provide the data to transform (there is nothing to fit)
      +Xover, yover = transform(mach, X, y)
      +
      +julia> Imbalance.checkbalance(yover)
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) 
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) 
      +
      diff --git a/v0.20.3/models/SODDetector_OutlierDetectionPython/index.html b/v0.20.3/models/SODDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..c1355a9c4 --- /dev/null +++ b/v0.20.3/models/SODDetector_OutlierDetectionPython/index.html @@ -0,0 +1,4 @@ + +SODDetector · MLJ diff --git a/v0.20.3/models/SOSDetector_OutlierDetectionPython/index.html b/v0.20.3/models/SOSDetector_OutlierDetectionPython/index.html new file mode 100644 index 000000000..25010f35f --- /dev/null +++ b/v0.20.3/models/SOSDetector_OutlierDetectionPython/index.html @@ -0,0 +1,4 @@ + +SOSDetector · MLJ diff --git a/v0.20.3/models/SRRegressor_SymbolicRegression/index.html b/v0.20.3/models/SRRegressor_SymbolicRegression/index.html new file mode 100644 index 000000000..d61ea1c40 --- /dev/null +++ b/v0.20.3/models/SRRegressor_SymbolicRegression/index.html @@ -0,0 +1,29 @@ + +SRRegressor · MLJ

      SRRegressor

      SRRegressor

      A model type for constructing a Symbolic Regression via Evolutionary Search, based on SymbolicRegression.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SRRegressor = @load SRRegressor pkg=SymbolicRegression

      Do model = SRRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SRRegressor(binary_operators=...).

      Single-target Symbolic Regression regressor (SRRegressor) searches for symbolic expressions that predict a single target variable from a set of input variables. All data is assumed to be Continuous. The search is performed using an evolutionary algorithm. This algorithm is described in the paper https://arxiv.org/abs/2305.01582.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      OR

      mach = machine(model, X, y, w)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). Variable names in discovered expressions will be taken from the column names of X, if available. Units in columns of X (use DynamicQuantities for units) will trigger dimensional analysis to be used.
      • y is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y). Units in y (use DynamicQuantities for units) will trigger dimensional analysis to be used.
      • w is the observation weights, which can be either nothing (default) or an AbstractVector whose element scitype is Count or Continuous.

      Train the machine using fit!(mach), inspect the discovered expressions with report(mach), and predict on new data with predict(mach, Xnew). Note that unlike other regressors, symbolic regression stores a list of trained models. The model chosen from this list is defined by the function selection_method keyword argument, which by default balances accuracy and complexity.

      Hyper-parameters

      • binary_operators: Vector of binary operators (functions) to use. Each operator should be defined for two input scalars, and one output scalar. All operators need to be defined over the entire real line (excluding infinity - these are stopped before they are input), or return NaN where not defined. For speed, define it so it takes two reals of the same type as input, and outputs the same type. For the SymbolicUtils simplification backend, you will need to define a generic method of the operator so it takes arbitrary types.

      • unary_operators: Same, but for unary operators (one input scalar, gives an output scalar).

      • constraints: Array of pairs specifying size constraints for each operator. The constraints for a binary operator should be a 2-tuple (e.g., (-1, -1)) and the constraints for a unary operator should be an Int. A size constraint is a limit to the size of the subtree in each argument of an operator. e.g., [(^)=>(-1, 3)] means that the ^ operator can have arbitrary size (-1) in its left argument, but a maximum size of 3 in its right argument. Default is no constraints.

      • batching: Whether to evolve based on small mini-batches of data, rather than the entire dataset.

      • batch_size: What batch size to use if using batching.

      • elementwise_loss: What elementwise loss function to use. Can be one of the following losses, or any other loss of type SupervisedLoss. You can also pass a function that takes a scalar target (left argument), and scalar predicted (right argument), and returns a scalar. This will be averaged over the predicted data. If weights are supplied, your function should take a third argument for the weight scalar. Included losses: Regression: - LPDistLoss{P}(), - L1DistLoss(), - L2DistLoss() (mean square), - LogitDistLoss(), - HuberLoss(d), - L1EpsilonInsLoss(ϵ), - L2EpsilonInsLoss(ϵ), - PeriodicLoss(c), - QuantileLoss(τ), Classification: - ZeroOneLoss(), - PerceptronLoss(), - L1HingeLoss(), - SmoothedL1HingeLoss(γ), - ModifiedHuberLoss(), - L2MarginLoss(), - ExpLoss(), - SigmoidLoss(), - DWDMarginLoss(q).

      • loss_function: Alternatively, you may redefine the loss used as any function of tree::Node{T}, dataset::Dataset{T}, and options::Options, so long as you output a non-negative scalar of type T. This is useful if you want to use a loss that takes into account derivatives, or correlations across the dataset. This also means you could use a custom evaluation for a particular expression. If you are using batching=true, then your function should accept a fourth argument idx, which is either nothing (indicating that the full dataset should be used), or a vector of indices to use for the batch. For example,

          function my_loss(tree, dataset::Dataset{T,L}, options)::L where {T,L}
        +      prediction, flag = eval_tree_array(tree, dataset.X, options)
        +      if !flag
        +          return L(Inf)
        +      end
        +      return sum((prediction .- dataset.y) .^ 2) / dataset.n
        +  end
      • populations: How many populations of equations to use.

      • population_size: How many equations in each population.

      • ncycles_per_iteration: How many generations to consider per iteration.

      • tournament_selection_n: Number of expressions considered in each tournament.

      • tournament_selection_p: The fittest expression in a tournament is to be selected with probability p, the next fittest with probability p*(1-p), and so forth.

      • topn: Number of equations to return to the host process, and to consider for the hall of fame.

      • complexity_of_operators: What complexity should be assigned to each operator, and the occurrence of a constant or variable. By default, this is 1 for all operators. Can be a real number as well, in which case the complexity of an expression will be rounded to the nearest integer. Input this in the form of, e.g., [(^) => 3, sin => 2].

      • complexity_of_constants: What complexity should be assigned to use of a constant. By default, this is 1.

      • complexity_of_variables: What complexity should be assigned to each variable. By default, this is 1.

      • alpha: The probability of accepting an equation mutation during regularized evolution is given by exp(-delta_loss/(alpha * T)), where T goes from 1 to 0. Thus, alpha=infinite is the same as no annealing.

      • maxsize: Maximum size of equations during the search.

      • maxdepth: Maximum depth of equations during the search, by default this is set equal to the maxsize.

      • parsimony: A multiplicative factor for how much complexity is punished.

      • dimensional_constraint_penalty: An additive factor if the dimensional constraint is violated.

      • use_frequency: Whether to use a parsimony that adapts to the relative proportion of equations at each complexity; this will ensure that there are a balanced number of equations considered for every complexity.

      • use_frequency_in_tournament: Whether to use the adaptive parsimony described above inside the score, rather than just at the mutation accept/reject stage.

      • adaptive_parsimony_scaling: How much to scale the adaptive parsimony term in the loss. Increase this if the search is spending too much time optimizing the most complex equations.

      • turbo: Whether to use LoopVectorization.@turbo to evaluate expressions. This can be significantly faster, but is only compatible with certain operators. Experimental!

      • migration: Whether to migrate equations between processes.

      • hof_migration: Whether to migrate equations from the hall of fame to processes.

      • fraction_replaced: What fraction of each population to replace with migrated equations at the end of each cycle.

      • fraction_replaced_hof: What fraction to replace with hall of fame equations at the end of each cycle.

      • should_simplify: Whether to simplify equations. If you pass a custom objective, this will be set to false.

      • should_optimize_constants: Whether to use an optimization algorithm to periodically optimize constants in equations.

      • optimizer_nrestarts: How many different random starting positions to consider for optimization of constants.

      • optimizer_algorithm: Select algorithm to use for optimizing constants. Default is "BFGS", but "NelderMead" is also supported.

      • optimizer_options: General options for the constant optimization. For details we refer to the documentation on Optim.Options from the Optim.jl package. Options can be provided here as NamedTuple, e.g. (iterations=16,), as a Dict, e.g. Dict(:x_tol => 1.0e-32,), or as an Optim.Options instance.

      • output_file: What file to store equations to, as a backup.

      • perturbation_factor: When mutating a constant, either multiply or divide by (1+perturbation_factor)^(rand()+1).

      • probability_negate_constant: Probability of negating a constant in the equation when mutating it.

      • mutation_weights: Relative probabilities of the mutations. The struct MutationWeights should be passed to these options. See its documentation on MutationWeights for the different weights.

      • crossover_probability: Probability of performing crossover.

      • annealing: Whether to use simulated annealing.

      • warmup_maxsize_by: Whether to slowly increase the max size from 5 up to maxsize. If nonzero, specifies the fraction through the search at which the maxsize should be reached.

      • verbosity: Whether to print debugging statements or not.

      • print_precision: How many digits to print when printing equations. By default, this is 5.

      • save_to_file: Whether to save equations to a file during the search.

      • bin_constraints: See constraints. This is the same, but specified for binary operators only (for example, if you have an operator that is both a binary and unary operator).

      • una_constraints: Likewise, for unary operators.

      • seed: What random seed to use. nothing uses no seed.

      • progress: Whether to use a progress bar output (verbosity will have no effect).

      • early_stop_condition: Float - whether to stop early if the mean loss gets below this value. Function - a function taking (loss, complexity) as arguments and returning true or false.

      • timeout_in_seconds: Float64 - the time in seconds after which to exit (as an alternative to the number of iterations).

      • max_evals: Int (or Nothing) - the maximum number of evaluations of expressions to perform.

      • skip_mutation_failures: Whether to simply skip over mutations that fail or are rejected, rather than to replace the mutated expression with the original expression and proceed normally.

      • enable_autodiff: Whether to enable automatic differentiation functionality. This is turned off by default. If turned on, this will be turned off if one of the operators does not have well-defined gradients.

      • nested_constraints: Specifies how many times a combination of operators can be nested. For example, [sin => [cos => 0], cos => [cos => 2]] specifies that cos may never appear within a sin, but sin can be nested with itself an unlimited number of times. The second term specifies that cos can be nested up to 2 times within a cos, so that cos(cos(cos(x))) is allowed (as well as any combination of + or - within it), but cos(cos(cos(cos(x)))) is not allowed. When an operator is not specified, it is assumed that it can be nested an unlimited number of times. This requires that there is no operator which is used both in the unary operators and the binary operators (e.g., - could be both subtract, and negation). For binary operators, both arguments are treated the same way, and the max of each argument is constrained.

      • deterministic: Use a global counter for the birth time, rather than calls to time(). This gives perfect resolution, and is therefore deterministic. However, it is not thread safe, and must be used in serial mode.

      • define_helper_functions: Whether to define helper functions for constructing and evaluating trees.

      • niterations::Int=10: The number of iterations to perform the search. More iterations will improve the results.

      • parallelism=:multithreading: What parallelism mode to use. The options are :multithreading, :multiprocessing, and :serial. By default, multithreading will be used. Multithreading uses less memory, but multiprocessing can handle multi-node compute. If using :multithreading mode, the number of threads available to julia are used. If using :multiprocessing, numprocs processes will be created dynamically if procs is unset. If you have already allocated processes, pass them to the procs argument and they will be used. You may also pass a string instead of a symbol, like "multithreading".

      • numprocs::Union{Int, Nothing}=nothing: The number of processes to use, if you want equation_search to set this up automatically. By default this will be 4, but can be any number (you should pick a number <= the number of cores available).

      • procs::Union{Vector{Int}, Nothing}=nothing: If you have set up a distributed run manually with procs = addprocs() and @everywhere, pass the procs to this keyword argument.

      • addprocs_function::Union{Function, Nothing}=nothing: If using multiprocessing (parallelism=:multiprocessing), and you are not passing procs manually, then processes will be allocated dynamically using addprocs. However, you may also pass a custom function to use instead of addprocs. This function should take a single positional argument, which is the number of processes to use, as well as the lazy keyword argument. For example, if set up on a slurm cluster, you could pass addprocs_function = addprocs_slurm, which will set up slurm processes.

      • heap_size_hint_in_bytes::Union{Int,Nothing}=nothing: On Julia 1.9+, you may set the --heap-size-hint flag on Julia processes, recommending garbage collection once a process is close to the recommended size. This is important for long-running distributed jobs where each process has an independent memory, and can help avoid out-of-memory errors. By default, this is set to Sys.free_memory() / numprocs.

      • runtests::Bool=true: Whether to run (quick) tests before starting the search, to see if there will be any problems during the equation search related to the host environment.

      • loss_type::Type=Nothing: If you would like to use a different type for the loss than for the data you passed, specify the type here. Note that if you pass complex data ::Complex{L}, then the loss type will automatically be set to L.

      • selection_method::Function: Function to select the expression from the Pareto frontier for use in predict. See SymbolicRegression.MLJInterfaceModule.choose_best for an example. This function should return a single integer specifying the index of the expression to use. By default, choose_best maximizes the score (a pound-for-pound rating) of expressions reaching the threshold of 1.5x the minimum loss. To fix the index at 5, you could just write Returns(5).

      • dimensions_type::AbstractDimensions: The type of dimensions to use when storing the units of the data. By default this is DynamicQuantities.SymbolicDimensions.

      Operations

      • predict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above. The expression used for prediction is defined by the selection_method function, which can be seen by viewing report(mach).best_idx.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • best_idx::Int: The index of the best expression in the Pareto frontier, as determined by the selection_method function.
      • equations::Vector{Node{T}}: The expressions discovered by the search, represented in a dominating Pareto frontier (i.e., the best expressions found for each complexity). T is equal to the element type of the passed data.
      • equation_strings::Vector{String}: The expressions discovered by the search, represented as strings for easy inspection.

      Report

      The fields of report(mach) are:

      • best_idx::Int: The index of the best expression in the Pareto frontier, as determined by the selection_method function.
      • equations::Vector{Node{T}}: The expressions discovered by the search, represented in a dominating Pareto frontier (i.e., the best expressions found for each complexity).
      • equation_strings::Vector{String}: The expressions discovered by the search, represented as strings for easy inspection.
      • complexities::Vector{Int}: The complexity of each expression in the Pareto frontier.
      • losses::Vector{L}: The loss of each expression in the Pareto frontier, according to the loss function specified in the model. The type L is the loss type, which is usually the same as the element type of data passed (i.e., T), but can differ if complex data types are passed.
      • scores::Vector{L}: A metric which considers both the complexity and loss of an expression, equal to the change in the log-loss divided by the change in complexity, relative to the previous expression along the Pareto frontier. A larger score aims to indicate an expression is more likely to be the true expression generating the data, but this is very problem-dependent and generally several other factors should be considered.

      Examples

      using MLJ
      +SRRegressor = @load SRRegressor pkg=SymbolicRegression
      +X, y = @load_boston
      +model = SRRegressor(binary_operators=[+, -, *], unary_operators=[exp], niterations=100)
      +mach = machine(model, X, y)
      +fit!(mach)
      +y_hat = predict(mach, X)
      +## View the equation used:
      +r = report(mach)
      +println("Equation used:", r.equation_strings[r.best_idx])

      With units and variable names:

      using MLJ
      +using DynamicQuantities
      +SRRegressor = @load SRRegressor pkg=SymbolicRegression
      +
      +X = (; x1=rand(32) .* us"km/h", x2=rand(32) .* us"km")
      +y = @. X.x2 / X.x1 + 0.5us"h"
      +model = SRRegressor(binary_operators=[+, -, *, /])
      +mach = machine(model, X, y)
      +fit!(mach)
      +y_hat = predict(mach, X)
      +## View the equation used:
      +r = report(mach)
      +println("Equation used:", r.equation_strings[r.best_idx])

      See also MultitargetSRRegressor.

      diff --git a/v0.20.3/models/SVC_LIBSVM/index.html b/v0.20.3/models/SVC_LIBSVM/index.html new file mode 100644 index 000000000..e364888fe --- /dev/null +++ b/v0.20.3/models/SVC_LIBSVM/index.html @@ -0,0 +1,36 @@ + +SVC · MLJ

      SVC

      SVC

      A model type for constructing a C-support vector classifier, based on LIBSVM.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SVC = @load SVC pkg=LIBSVM

      Do model = SVC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SVC(kernel=...).

      This model predicts actual class labels. To predict probabilities, use instead ProbabilisticSVC.

      Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): "LIBSVM: a library for support vector machines." ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf.

      Training data

      In MLJ or MLJBase, bind an instance model to data with one of:

      mach = machine(model, X, y)
      +mach = machine(model, X, y, w)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)
      • y: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)
      • w: a dictionary of class weights, keyed on levels(y).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see "Examples" below).

        • LIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2
        • LIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree
        • LIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))
        • LIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)

        Here gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue #91.

      • gamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).

      • coef0 = 0.0: kernel parameter (see above)

      • degree::Int32 = Int32(3): degree in polynomial kernel (see above)

      • cost=1.0 (range (0, Inf)): the parameter denoted $C$ in the cited reference; for greater regularization, decrease cost

      • cachesize=200.0: cache memory size in MB

      • tolerance=0.001: tolerance for the stopping criterion

      • shrinking=true: whether to use shrinking heuristics

      Operations

      • predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • libsvm_model: the trained model object created by the LIBSVM.jl package
      • encoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation

      Report

      The fields of report(mach) are:

      • gamma: actual value of the kernel parameter gamma used in training

      Examples

      Using a built-in kernel

      using MLJ
      +import LIBSVM
      +
      +SVC = @load SVC pkg=LIBSVM                   ## model type
      +model = SVC(kernel=LIBSVM.Kernel.Polynomial) ## instance
      +
      +X, y = @load_iris ## table, vector
      +mach = machine(model, X, y) |> fit!
      +
      +Xnew = (sepal_length = [6.4, 7.2, 7.4],
      +        sepal_width = [2.8, 3.0, 2.8],
      +        petal_length = [5.6, 5.8, 6.1],
      +        petal_width = [2.1, 1.6, 1.9],)
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:
      + "virginica"
      + "virginica"
      + "virginica"

      User-defined kernels

      k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`
      +model = SVC(kernel=k)
      +mach = machine(model, X, y) |> fit!
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:
      + "virginica"
      + "virginica"
      + "virginica"

      Incorporating class weights

      In either scenario above, we can do:

      weights = Dict("virginica" => 1, "versicolor" => 20, "setosa" => 1)
      +mach = machine(model, X, y, weights) |> fit!
      +
      +julia> yhat = predict(mach, Xnew)
      +3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:
      + "versicolor"
      + "versicolor"
      + "versicolor"

      See also the classifiers ProbabilisticSVC, NuSVC and LinearSVC, as well as LIBSVM.jl and the original C implementation documentation.

      diff --git a/v0.20.3/models/SVMClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/SVMClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..70413c592 --- /dev/null +++ b/v0.20.3/models/SVMClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +SVMClassifier · MLJ

      SVMClassifier

      SVMClassifier

      A model type for constructing a C-support vector classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SVMClassifier = @load SVMClassifier pkg=MLJScikitLearnInterface

      Do model = SVMClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SVMClassifier(C=...).

      Hyper-parameters

      • C = 1.0
      • kernel = rbf
      • degree = 3
      • gamma = scale
      • coef0 = 0.0
      • shrinking = true
      • tol = 0.001
      • cache_size = 200
      • max_iter = -1
      • decision_function_shape = ovr
      • random_state = nothing
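
      The following minimal sketch assumes a working scikit-learn backend for MLJScikitLearnInterface and that predict returns point labels; the hyper-parameter value shown is arbitrary:

      using MLJ
      +
      +SVMClassifier = @load SVMClassifier pkg=MLJScikitLearnInterface
      +model = SVMClassifier(C=10.0)        ## arbitrary choice of regularization parameter
      +
      +X, y = @load_iris ## table, vector
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)              ## point predictions of the class labels (assumed)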
      diff --git a/v0.20.3/models/SVMLinearClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/SVMLinearClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..60f097a13 --- /dev/null +++ b/v0.20.3/models/SVMLinearClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +SVMLinearClassifier · MLJ

      SVMLinearClassifier

      SVMLinearClassifier

      A model type for constructing a linear support vector classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SVMLinearClassifier = @load SVMLinearClassifier pkg=MLJScikitLearnInterface

      Do model = SVMLinearClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SVMLinearClassifier(penalty=...).

      Hyper-parameters

      • penalty = l2
      • loss = squared_hinge
      • dual = true
      • tol = 0.0001
      • C = 1.0
      • multi_class = ovr
      • fit_intercept = true
      • intercept_scaling = 1.0
      • random_state = nothing
      • max_iter = 1000
      diff --git a/v0.20.3/models/SVMLinearRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/SVMLinearRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..b8c6bb30d --- /dev/null +++ b/v0.20.3/models/SVMLinearRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +SVMLinearRegressor · MLJ

      SVMLinearRegressor

      SVMLinearRegressor

      A model type for constructing a linear support vector regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SVMLinearRegressor = @load SVMLinearRegressor pkg=MLJScikitLearnInterface

      Do model = SVMLinearRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SVMLinearRegressor(epsilon=...).

      Hyper-parameters

      • epsilon = 0.0
      • tol = 0.0001
      • C = 1.0
      • loss = epsilon_insensitive
      • fit_intercept = true
      • intercept_scaling = 1.0
      • dual = true
      • random_state = nothing
      • max_iter = 1000
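
      A similar minimal sketch for regression, using MLJ's synthetic make_regression data (again assuming a working scikit-learn backend; the epsilon value is arbitrary):

      using MLJ
      +
      +SVMLinearRegressor = @load SVMLinearRegressor pkg=MLJScikitLearnInterface
      +model = SVMLinearRegressor(epsilon=0.1)
      +
      +X, y = make_regression(100, 3)       ## synthetic table and Continuous target
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)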
      diff --git a/v0.20.3/models/SVMNuClassifier_MLJScikitLearnInterface/index.html b/v0.20.3/models/SVMNuClassifier_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..6484db555 --- /dev/null +++ b/v0.20.3/models/SVMNuClassifier_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +SVMNuClassifier · MLJ

      SVMNuClassifier

      SVMNuClassifier

      A model type for constructing a nu-support vector classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SVMNuClassifier = @load SVMNuClassifier pkg=MLJScikitLearnInterface

      Do model = SVMNuClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SVMNuClassifier(nu=...).

      Hyper-parameters

      • nu = 0.5
      • kernel = rbf
      • degree = 3
      • gamma = scale
      • coef0 = 0.0
      • shrinking = true
      • tol = 0.001
      • cache_size = 200
      • max_iter = -1
      • decision_function_shape = ovr
      • random_state = nothing
      diff --git a/v0.20.3/models/SVMNuRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/SVMNuRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..3aaa4512e --- /dev/null +++ b/v0.20.3/models/SVMNuRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +SVMNuRegressor · MLJ

      SVMNuRegressor

      SVMNuRegressor

      A model type for constructing a nu-support vector regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SVMNuRegressor = @load SVMNuRegressor pkg=MLJScikitLearnInterface

      Do model = SVMNuRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SVMNuRegressor(nu=...).

      Hyper-parameters

      • nu = 0.5
      • C = 1.0
      • kernel = rbf
      • degree = 3
      • gamma = scale
      • coef0 = 0.0
      • shrinking = true
      • tol = 0.001
      • cache_size = 200
      • max_iter = -1
      diff --git a/v0.20.3/models/SVMRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/SVMRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..6dab93194 --- /dev/null +++ b/v0.20.3/models/SVMRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +SVMRegressor · MLJ

      SVMRegressor

      SVMRegressor

      A model type for constructing an epsilon-support vector regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SVMRegressor = @load SVMRegressor pkg=MLJScikitLearnInterface

      Do model = SVMRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SVMRegressor(kernel=...).

      Hyper-parameters

      • kernel = rbf
      • degree = 3
      • gamma = scale
      • coef0 = 0.0
      • tol = 0.001
      • C = 1.0
      • epsilon = 0.1
      • shrinking = true
      • cache_size = 200
      • max_iter = -1
      diff --git a/v0.20.3/models/SelfOrganizingMap_SelfOrganizingMaps/index.html b/v0.20.3/models/SelfOrganizingMap_SelfOrganizingMaps/index.html new file mode 100644 index 000000000..33198dc4e --- /dev/null +++ b/v0.20.3/models/SelfOrganizingMap_SelfOrganizingMaps/index.html @@ -0,0 +1,10 @@ + +SelfOrganizingMap · MLJ

      SelfOrganizingMap

      SelfOrganizingMap

      A model type for constructing a self organizing map, based on SelfOrganizingMaps.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SelfOrganizingMap = @load SelfOrganizingMap pkg=SelfOrganizingMaps

      Do model = SelfOrganizingMap() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SelfOrganizingMap(k=...).

      SelfOrganizingMaps implements Kohonen's self-organizing map: Kohonen, T. (1990), "The self-organizing map", Proceedings of the IEEE.

      Training data

      In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X) where

      • X: an AbstractMatrix or Table of input features whose columns are of scitype Continuous.

      Train the machine with fit!(mach, rows=...).

      Hyper-parameters

      • k=10: Number of nodes along one side of the SOM grid, giving k² nodes in total.
      • η=0.5: Learning rate. Scales the adjustment made to the winning node and its neighbors during each round of training.
      • σ²=0.05: The (squared) neighbor radius. Used to determine scale for neighbor node adjustments.
      • grid_type=:rectangular: Node grid geometry. One of (:rectangular, :hexagonal, :spherical).
      • η_decay=:exponential: Learning rate schedule function. One of (:exponential, :asymptotic).
      • σ_decay=:exponential: Neighbor radius schedule function. One of (:exponential, :asymptotic, :none).
      • neighbor_function=:gaussian: Kernel function used to make adjustments to neighbor weights. Scale is set by σ². One of (:gaussian, :mexican_hat).
      • matching_distance=euclidean: Distance function from Distances.jl used to determine the winning node.
      • Nepochs=1: Number of times to repeat training on the shuffled dataset.

      Operations

      • transform(mach, Xnew): returns the coordinates of the winning SOM node for each instance of Xnew. For a SOM with grid_type :rectangular or :hexagonal, these are Cartesian coordinates. For grid_type :spherical, these are the latitude and longitude in radians.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • coords: The coordinates of each of the SOM nodes (points in the domain of the map) with shape (k², 2)
      • weights: Array of weight vectors for the SOM nodes (corresponding points in the map's range) of shape (k², input dimension)

      Report

      The fields of report(mach) are:

      • classes: the index of the winning node for each instance of the training data X interpreted as a class label

      Examples

      using MLJ
      +som = @load SelfOrganizingMap pkg=SelfOrganizingMaps
      +model = som()
      +X, y = make_regression(50, 3) ## synthetic data
      +mach = machine(model, X) |> fit!
      +X̃ = transform(mach, X)
      +
      +rpt = report(mach)
      +classes = rpt.classes
      diff --git a/v0.20.3/models/SimpleImputer_BetaML/index.html b/v0.20.3/models/SimpleImputer_BetaML/index.html new file mode 100644 index 000000000..fdd02fec4 --- /dev/null +++ b/v0.20.3/models/SimpleImputer_BetaML/index.html @@ -0,0 +1,29 @@ + +SimpleImputer · MLJ

      SimpleImputer

      mutable struct SimpleImputer <: MLJModelInterface.Unsupervised

      Impute missing values using the feature (column) mean, with optional normalisation of each record by its l-norm, from the Beta Machine Learning Toolkit (BetaML).

      Hyperparameters:

      • statistic::Function: The descriptive statistic of the column (feature) to use as imputed value [def: mean]
      • norm::Union{Nothing, Int64}: Normalise the feature mean by the l-norm of the records [default: nothing]. Use it (e.g. norm=1 to use the l-1 norm) if the records are highly heterogeneous (e.g. quantity exports of different countries).

      Example:

      julia> using MLJ
      +
      +julia> X = [1 10.5;1.5 missing; 1.8 8; 1.7 15; 3.2 40; missing missing; 3.3 38; missing -2.3; 5.2 -2.4] |> table ;
      +
      +julia> modelType   = @load SimpleImputer  pkg = "BetaML" verbosity=0
      +BetaML.Imputation.SimpleImputer
      +
      +julia> model     = modelType(norm=1)
      +SimpleImputer(
      +  statistic = Statistics.mean, 
      +  norm = 1)
      +
      +julia> mach      = machine(model, X);
      +
      +julia> fit!(mach);
      +[ Info: Training machine(SimpleImputer(statistic = mean, …), …).
      +
      +julia> X_full       = transform(mach) |> MLJ.matrix
      +9×2 Matrix{Float64}:
      + 1.0        10.5
      + 1.5         0.295466
      + 1.8         8.0
      + 1.7        15.0
      + 3.2        40.0
      + 0.280952    1.69524
      + 3.3        38.0
      + 0.0750839  -2.3
      + 5.2        -2.4
      diff --git a/v0.20.3/models/SpectralClustering_MLJScikitLearnInterface/index.html b/v0.20.3/models/SpectralClustering_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..d4a70757e --- /dev/null +++ b/v0.20.3/models/SpectralClustering_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +SpectralClustering · MLJ

      SpectralClustering

      SpectralClustering

      A model type for constructing a spectral clustering, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SpectralClustering = @load SpectralClustering pkg=MLJScikitLearnInterface

      Do model = SpectralClustering() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SpectralClustering(n_clusters=...).

      Apply clustering to a projection of the normalized Laplacian. In practice, spectral clustering is very useful when the structure of the individual clusters is highly non-convex, or, more generally, when a measure of the center and spread of the cluster is not a suitable description of the complete cluster; for instance, when clusters are nested circles in the 2D plane.

      diff --git a/v0.20.3/models/StableForestClassifier_SIRUS/index.html b/v0.20.3/models/StableForestClassifier_SIRUS/index.html new file mode 100644 index 000000000..2d02b3280 --- /dev/null +++ b/v0.20.3/models/StableForestClassifier_SIRUS/index.html @@ -0,0 +1,2 @@ + +StableForestClassifier · MLJ

      StableForestClassifier

      StableForestClassifier

      A model type for constructing a stable forest classifier, based on SIRUS.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      StableForestClassifier = @load StableForestClassifier pkg=SIRUS

      Do model = StableForestClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in StableForestClassifier(rng=...).

      StableForestClassifier implements the random forest classifier with a stabilized forest structure (Bénard et al., 2021). This stabilization increases stability when extracting rules. The impact on the predictive accuracy compared to standard random forests should be relatively small.

      Note

      Just like normal random forests, this model is not easily explainable. If you are interested in an explainable model, use the StableRulesClassifier or StableRulesRegressor.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyperparameters

      • rng::AbstractRNG=default_rng(): Random number generator. Using a StableRNG from StableRNGs.jl is advised.
      • partial_sampling::Float64=0.7: Ratio of samples to use in each subset of the data. The default should be fine for most cases.
      • n_trees::Int=1000: The number of trees to use. It is advisable to use at least a thousand trees for better rule selection and, in turn, better predictive performance.
      • max_depth::Int=2: The depth of the tree. A lower depth decreases model complexity and can therefore improve accuracy when the sample size is small (reduce overfitting).
      • q::Int=10: Number of cutpoints to use per feature. The default value should be fine for most situations.
      • min_data_in_leaf::Int=5: Minimum number of data points per leaf.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • fitresult: A StableForest object.

      Operations

      • predict(mach, Xnew): Return a vector of predictions for each row of Xnew.
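
      A minimal usage sketch on the iris data, assuming the StableRNGs package is available (as advised for rng above):

      using MLJ
      +using StableRNGs
      +
      +StableForestClassifier = @load StableForestClassifier pkg=SIRUS
      +model = StableForestClassifier(rng=StableRNG(1))
      +
      +X, y = @load_iris ## table, vector
      +mach = machine(model, X, y) |> fit!
      +yhat = predict(mach, X)   ## predictions for the training rows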
      diff --git a/v0.20.3/models/StableForestRegressor_SIRUS/index.html b/v0.20.3/models/StableForestRegressor_SIRUS/index.html new file mode 100644 index 000000000..740a36248 --- /dev/null +++ b/v0.20.3/models/StableForestRegressor_SIRUS/index.html @@ -0,0 +1,2 @@ + +StableForestRegressor · MLJ

      StableForestRegressor

      StableForestRegressor

      A model type for constructing a stable forest regressor, based on SIRUS.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      StableForestRegressor = @load StableForestRegressor pkg=SIRUS

      Do model = StableForestRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in StableForestRegressor(rng=...).

      StableForestRegressor implements the random forest regressor with a stabilized forest structure (Bénard et al., 2021).

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyperparameters

      • rng::AbstractRNG=default_rng(): Random number generator. Using a StableRNG from StableRNGs.jl is advised.
      • partial_sampling::Float64=0.7: Ratio of samples to use in each subset of the data. The default should be fine for most cases.
      • n_trees::Int=1000: The number of trees to use. It is advisable to use at least a thousand trees for better rule selection and, in turn, better predictive performance.
      • max_depth::Int=2: The depth of the tree. A lower depth decreases model complexity and can therefore improve accuracy when the sample size is small (reduce overfitting).
      • q::Int=10: Number of cutpoints to use per feature. The default value should be fine for most situations.
      • min_data_in_leaf::Int=5: Minimum number of data points per leaf.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • fitresult: A StableForest object.

      Operations

      • predict(mach, Xnew): Return a vector of predictions for each row of Xnew.
      diff --git a/v0.20.3/models/StableRulesClassifier_SIRUS/index.html b/v0.20.3/models/StableRulesClassifier_SIRUS/index.html new file mode 100644 index 000000000..984e3f0b1 --- /dev/null +++ b/v0.20.3/models/StableRulesClassifier_SIRUS/index.html @@ -0,0 +1,2 @@ + +StableRulesClassifier · MLJ

      StableRulesClassifier

      StableRulesClassifier

      A model type for constructing a stable rules classifier, based on SIRUS.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      StableRulesClassifier = @load StableRulesClassifier pkg=SIRUS

      Do model = StableRulesClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in StableRulesClassifier(rng=...).

      StableRulesClassifier implements the explainable rule-based model based on a random forest.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyperparameters

      • rng::AbstractRNG=default_rng(): Random number generator. Using a StableRNG from StableRNGs.jl is advised.
      • partial_sampling::Float64=0.7: Ratio of samples to use in each subset of the data. The default should be fine for most cases.
      • n_trees::Int=1000: The number of trees to use. It is advisable to use at least a thousand trees for better rule selection and, in turn, better predictive performance.
      • max_depth::Int=2: The depth of the tree. A lower depth decreases model complexity and can therefore improve accuracy when the sample size is small (reduce overfitting).
      • q::Int=10: Number of cutpoints to use per feature. The default value should be fine for most situations.
      • min_data_in_leaf::Int=5: Minimum number of data points per leaf.
      • max_rules::Int=10: This is the most important hyperparameter after lambda. The more rules, the more accurate the model should be. If this is not the case, tune lambda first. However, more rules will also decrease model interpretability. So, it is important to find a good balance here. In most cases, 10 to 40 rules should provide reasonable accuracy while remaining interpretable.
      • lambda::Float64=1.0: The weights of the final rules are determined via a regularized regression over each rule as a binary feature. This hyperparameter specifies the strength of the ridge (L2) regularizer. SIRUS is very sensitive to the choice of this hyperparameter. Ensure that you try the full range from 10^-4 to 10^4 (e.g., 0.001, 0.01, ..., 100). When trying the range, one good check is to verify that an increase in max_rules increases performance. If this is not the case, then try a different value for lambda.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • fitresult: A StableRules object.

      Operations

      • predict(mach, Xnew): Return a vector of predictions for each row of Xnew.
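
      A minimal usage sketch on the iris data; the fitted rules are retrieved from fitted_params, as described above:

      using MLJ
      +
      +StableRulesClassifier = @load StableRulesClassifier pkg=SIRUS
      +model = StableRulesClassifier(max_rules=10)
      +
      +X, y = @load_iris ## table, vector
      +mach = machine(model, X, y) |> fit!
      +
      +rules = fitted_params(mach).fitresult   ## the StableRules object (see Fitted parameters)
      +yhat = predict(mach, X)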
      diff --git a/v0.20.3/models/StableRulesRegressor_SIRUS/index.html b/v0.20.3/models/StableRulesRegressor_SIRUS/index.html new file mode 100644 index 000000000..7b97c9694 --- /dev/null +++ b/v0.20.3/models/StableRulesRegressor_SIRUS/index.html @@ -0,0 +1,2 @@ + +StableRulesRegressor · MLJ

      StableRulesRegressor

      StableRulesRegressor

      A model type for constructing a stable rules regressor, based on SIRUS.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      StableRulesRegressor = @load StableRulesRegressor pkg=SIRUS

      Do model = StableRulesRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in StableRulesRegressor(rng=...).

      StableRulesRegressor implements the explainable rule-based regression model based on a random forest.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)
      • y: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)

      Train the machine with fit!(mach, rows=...).

      Hyperparameters

      • rng::AbstractRNG=default_rng(): Random number generator. Using a StableRNG from StableRNGs.jl is advised.
      • partial_sampling::Float64=0.7: Ratio of samples to use in each subset of the data. The default should be fine for most cases.
      • n_trees::Int=1000: The number of trees to use. It is advisable to use at least a thousand trees for better rule selection and, in turn, better predictive performance.
      • max_depth::Int=2: The depth of the tree. A lower depth decreases model complexity and can therefore improve accuracy when the sample size is small (reduce overfitting).
      • q::Int=10: Number of cutpoints to use per feature. The default value should be fine for most situations.
      • min_data_in_leaf::Int=5: Minimum number of data points per leaf.
      • max_rules::Int=10: This is the most important hyperparameter after lambda. The more rules, the more accurate the model should be. If this is not the case, tune lambda first. However, more rules will also decrease model interpretability. So, it is important to find a good balance here. In most cases, 10 to 40 rules should provide reasonable accuracy while remaining interpretable.
      • lambda::Float64=1.0: The weights of the final rules are determined via a regularized regression over each rule as a binary feature. This hyperparameter specifies the strength of the ridge (L2) regularizer. SIRUS is very sensitive to the choice of this hyperparameter. Ensure that you try the full range from 10^-4 to 10^4 (e.g., 0.001, 0.01, ..., 100). When trying the range, one good check is to verify that an increase in max_rules increases performance. If this is not the case, then try a different value for lambda.
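
      The log-scale search over lambda recommended above can be sketched with MLJ's generic tuning wrapper; the grid resolution, resampling strategy, and measure used here are illustrative assumptions:

      using MLJ
      +
      +StableRulesRegressor = @load StableRulesRegressor pkg=SIRUS
      +model = StableRulesRegressor()
      +
      +X, y = make_regression(200, 5)   ## synthetic data
      +
      +r = range(model, :lambda, lower=1e-4, upper=1e4, scale=:log10)
      +tuned = TunedModel(model=model, range=r, tuning=Grid(resolution=9),
      +                   resampling=CV(nfolds=5), measure=rms)
      +mach = machine(tuned, X, y) |> fit!
      +report(mach).best_model          ## the lambda selected by the search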

      Fitted parameters

      The fields of fitted_params(mach) are:

      • fitresult: A StableRules object.

      Operations

      • predict(mach, Xnew): Return a vector of predictions for each row of Xnew.
      diff --git a/v0.20.3/models/Standardizer_MLJModels/index.html b/v0.20.3/models/Standardizer_MLJModels/index.html new file mode 100644 index 000000000..fc4ade88a --- /dev/null +++ b/v0.20.3/models/Standardizer_MLJModels/index.html @@ -0,0 +1,37 @@ + +Standardizer · MLJ

      Standardizer

      Standardizer

      A model type for constructing a standardizer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      Standardizer = @load Standardizer pkg=MLJModels

      Do model = Standardizer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in Standardizer(features=...).

      Use this model to standardize (whiten) a Continuous vector, or relevant columns of a table. The rescalings applied by this transformer to new data are always those learned during the training phase, which are generally different from what would actually standardize the new data.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any Tables.jl compatible table or any abstract vector with Continuous element scitype (any abstract float vector). Only features in a table with Continuous scitype can be standardized; check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • features: one of the following, with the behavior indicated below:

        • [] (empty, the default): standardize all features (columns) having Continuous element scitype
        • non-empty vector of feature names (symbols): standardize only the Continuous features in the vector (if ignore=false) or Continuous features not named in the vector (ignore=true).
        • function or other callable: standardize a feature if the callable returns true on its name. For example, Standardizer(features = name -> name in [:x1, :x3], ignore = true, count=true) has the same effect as Standardizer(features = [:x1, :x3], ignore = true, count=true), namely to standardize all Continuous and Count features, with the exception of :x1 and :x3.

        Note this behavior is further modified if the ordered_factor or count flags are set to true; see below

      • ignore=false: whether to ignore or standardize specified features, as explained above

      • ordered_factor=false: if true, standardize any OrderedFactor feature wherever a Continuous feature would be standardized, as described above

      • count=false: if true, standardize any Count feature wherever a Continuous feature would be standardized, as described above

      Operations

      • transform(mach, Xnew): return Xnew with relevant features standardized according to the rescalings learned during fitting of mach.
      • inverse_transform(mach, Z): apply the inverse transformation to Z, so that inverse_transform(mach, transform(mach, Xnew)) is approximately the same as Xnew; unavailable if ordered_factor or count flags were set to true.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features_fit - the names of features that will be standardized
      • means - the corresponding untransformed mean values
      • stds - the corresponding untransformed standard deviations

      Report

      The fields of report(mach) are:

      • features_fit: the names of features that will be standardized

      Examples

      using MLJ
      +
      +X = (ordinal1 = [1, 2, 3],
      +     ordinal2 = coerce([:x, :y, :x], OrderedFactor),
      +     ordinal3 = [10.0, 20.0, 30.0],
      +     ordinal4 = [-20.0, -30.0, -40.0],
      +     nominal = coerce(["Your father", "he", "is"], Multiclass));
      +
      +julia> schema(X)
      +┌──────────┬──────────────────┐
      +│ names    │ scitypes         │
      +├──────────┼──────────────────┤
      +│ ordinal1 │ Count            │
      +│ ordinal2 │ OrderedFactor{2} │
      +│ ordinal3 │ Continuous       │
      +│ ordinal4 │ Continuous       │
      +│ nominal  │ Multiclass{3}    │
      +└──────────┴──────────────────┘
      +
      +stand1 = Standardizer();
      +
      +julia> transform(fit!(machine(stand1, X)), X)
      +(ordinal1 = [1, 2, 3],
      + ordinal2 = CategoricalValue{Symbol,UInt32}[:x, :y, :x],
      + ordinal3 = [-1.0, 0.0, 1.0],
      + ordinal4 = [1.0, 0.0, -1.0],
      + nominal = CategoricalValue{String,UInt32}["Your father", "he", "is"],)
      +
      +stand2 = Standardizer(features=[:ordinal3, ], ignore=true, count=true);
      +
      +julia> transform(fit!(machine(stand2, X)), X)
      +(ordinal1 = [-1.0, 0.0, 1.0],
      + ordinal2 = CategoricalValue{Symbol,UInt32}[:x, :y, :x],
      + ordinal3 = [10.0, 20.0, 30.0],
      + ordinal4 = [1.0, 0.0, -1.0],
      + nominal = CategoricalValue{String,UInt32}["Your father", "he", "is"],)

      See also OneHotEncoder, ContinuousEncoder.

      diff --git a/v0.20.3/models/SubspaceLDA_MultivariateStats/index.html b/v0.20.3/models/SubspaceLDA_MultivariateStats/index.html new file mode 100644 index 000000000..c37c3a009 --- /dev/null +++ b/v0.20.3/models/SubspaceLDA_MultivariateStats/index.html @@ -0,0 +1,13 @@ + +SubspaceLDA · MLJ

      SubspaceLDA

      SubspaceLDA

      A model type for constructing a subspace LDA model, based on MultivariateStats.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      SubspaceLDA = @load SubspaceLDA pkg=MultivariateStats

      Do model = SubspaceLDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SubspaceLDA(normalize=...).

      Multiclass subspace linear discriminant analysis (LDA) is a variation on ordinary LDA suitable for high dimensional data, as it avoids storing scatter matrices. For details, refer to the MultivariateStats.jl documentation.

      In addition to dimension reduction (using transform), probabilistic classification is provided (using predict). In the case of classification, the class probability for a new observation reflects the proximity of that observation to training observations associated with that class, and how far away the observation is from observations associated with other classes. Specifically, the distances, in the transformed (projected) space, of a new observation from the centroid of each target class are computed; the resulting vector of distances, multiplied by minus one, is passed to a softmax function to obtain a class probability prediction. Here "distance" is computed using a user-specified distance function.
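
      For intuition, here is a sketch of the scoring step just described; the vectors z and centroids below are hypothetical stand-ins for a projected observation and the projected class centroids (the actual computation is internal to the model):

      import Distances
      +
      +z = [1.0, 2.0]                                    ## a projected observation (hypothetical)
      +centroids = [[0.0, 0.0], [1.0, 1.5], [4.0, 4.0]]  ## projected class centroids (hypothetical)
      +
      +dist = Distances.SqEuclidean()                    ## the default `dist` hyper-parameter
      +scores = [-dist(z, c) for c in centroids]         ## minus the distance to each centroid
      +probs = exp.(scores) ./ sum(exp.(scores))         ## softmax gives the class probabilities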

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X, y)

      Here:

      • X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).
      • y is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • normalize=true: Option to normalize the between class variance for the number of observations in each class, one of true or false.
      • outdim: the output dimension, automatically set to min(indim, nclasses-1) if equal to 0. If a non-zero outdim is passed, then the actual output dimension used is min(rank, outdim) where rank is the rank of the within-class covariance matrix.
      • dist=Distances.SqEuclidean(): The distance metric to use when performing classification (to compare the distance between a new point and centroids in the transformed space); must be a subtype of Distances.SemiMetric from Distances.jl, e.g., Distances.CosineDist.

      Operations

      • transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.
      • predict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above. Predictions are probabilistic but uncalibrated.
      • predict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • classes: The classes seen during model fitting.
      • projection_matrix: The learned projection matrix, of size (indim, outdim), where indim and outdim are the input and output dimensions respectively (See Report section below).

      Report

      The fields of report(mach) are:

      • indim: The dimension of the input space, i.e., the number of training features.
      • outdim: The dimension of the transformed space the model is projected to.
      • mean: The mean of the untransformed training data. A vector of length indim.
      • nclasses: The number of classes directly observed in the training data (which can be less than the total number of classes in the class pool)

      • class_means: The class-specific means of the training data. A matrix of size (indim, nclasses) with the ith column being the class-mean of the ith class in classes (see fitted parameters section above).

      • class_weights: The weights (class counts) of each class. A vector of length nclasses with the ith element being the class weight of the ith class in classes. (See fitted params section above.)
      • explained_variance_ratio: The ratio of explained variance to total variance. Each dimension corresponds to an eigenvalue.

      Examples

      using MLJ
      +
      +SubspaceLDA = @load SubspaceLDA pkg=MultivariateStats
      +
      +X, y = @load_iris ## a table and a vector
      +
      +model = SubspaceLDA()
      +mach = machine(model, X, y) |> fit!
      +
      +Xproj = transform(mach, X)
      +y_hat = predict(mach, X)
      +labels = predict_mode(mach, X)

      See also LDA, BayesianLDA, BayesianSubspaceLDA

      diff --git a/v0.20.3/models/TSVDTransformer_TSVD/index.html b/v0.20.3/models/TSVDTransformer_TSVD/index.html new file mode 100644 index 000000000..395e2171a --- /dev/null +++ b/v0.20.3/models/TSVDTransformer_TSVD/index.html @@ -0,0 +1,2 @@ + +TSVDTransformer · MLJ diff --git a/v0.20.3/models/TfidfTransformer_MLJText/index.html b/v0.20.3/models/TfidfTransformer_MLJText/index.html new file mode 100644 index 000000000..776335e40 --- /dev/null +++ b/v0.20.3/models/TfidfTransformer_MLJText/index.html @@ -0,0 +1,46 @@ + +TfidfTransformer · MLJ

      TfidfTransformer

      TfidfTransformer

      A model type for constructing a TF-IDF transformer, based on MLJText.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      TfidfTransformer = @load TfidfTransformer pkg=MLJText

      Do model = TfidfTransformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in TfidfTransformer(max_doc_freq=...).

      The transformer converts a collection of documents, tokenized or pre-parsed as bags of words/ngrams, to a matrix of TF-IDF scores. Here "TF" means term-frequency while "IDF" means inverse document frequency (defined below). The TF-IDF score is the product of the two. This is a common term-weighting scheme in information retrieval that has also found good use in document classification. The goal of using TF-IDF instead of the raw frequencies of occurrence of a token in a given document is to scale down the impact of tokens that occur very frequently in a given corpus and that are hence empirically less informative than features occurring in a small fraction of the training corpus.

      In textbooks and implementations there is variation in the definition of IDF. Here two IDF definitions are available. The default, smoothed option provides the IDF for a term t as log((1 + n)/(1 + df(t))) + 1, where n is the total number of documents and df(t) the number of documents in which t appears. Setting smooth_df = false provides an IDF of log(n/df(t)) + 1.
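
      The two definitions, written out as plain Julia functions (for illustration only; these helpers are not part of the MLJText API):

      idf_smooth(n, df)   = log((1 + n)/(1 + df)) + 1   ## smooth_idf = true (the default)
      +idf_unsmooth(n, df) = log(n/df) + 1               ## smooth_idf = false
      +
      +## e.g., for a term appearing in 10 of 100 documents:
      +idf_smooth(100, 10)     ## ≈ 3.217
      +idf_unsmooth(100, 10)   ## ≈ 3.303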

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      Here:

      • X is any vector whose elements are either tokenized documents or bags of words/ngrams. Specifically, each element is one of the following:

        • A vector of abstract strings (tokens), e.g., ["I", "like", "Sam", ".", "Sam", "is", "nice", "."] (scitype AbstractVector{Textual})
        • A dictionary of counts, indexed on abstract strings, e.g., Dict("I"=>1, "Sam"=>2, "Sam is"=>1) (scitype Multiset{Textual})
        • A dictionary of counts, indexed on plain ngrams, e.g., Dict(("I",)=>1, ("Sam",)=>2, ("I", "Sam")=>1) (scitype Multiset{<:NTuple{N,Textual} where N}); here a plain ngram is a tuple of abstract strings.

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • max_doc_freq=1.0: Restricts the vocabulary that the transformer will consider. Terms that occur in > max_doc_freq documents will not be considered by the transformer. For example, if max_doc_freq is set to 0.9, terms that are in more than 90% of the documents will be removed.
      • min_doc_freq=0.0: Restricts the vocabulary that the transformer will consider. Terms that occur in < min_doc_freq documents will not be considered by the transformer. For example, a value of 0.01 means that only terms appearing in at least 1% of the documents will be included.
      • smooth_idf=true: Control which definition of IDF to use (see above).

      Operations

      • transform(mach, Xnew): Based on the vocabulary and IDF learned in training, return the matrix of TF-IDF scores for Xnew, a vector of the same form as X above. The matrix has size (n, p), where n = length(Xnew) and p the size of the vocabulary. Tokens/ngrams not appearing in the learned vocabulary are scored zero.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • vocab: A vector containing the strings used in the transformer's vocabulary.
      • idf_vector: The transformer's calculated IDF vector.

      Examples

      TfidfTransformer accepts a variety of inputs. The example below transforms tokenized documents:

      using MLJ
      +import TextAnalysis
      +
      +TfidfTransformer = @load TfidfTransformer pkg=MLJText
      +
      +docs = ["Hi my name is Sam.", "How are you today?"]
      +tfidf_transformer = TfidfTransformer()
      +
      +julia> tokenized_docs = TextAnalysis.tokenize.(docs)
      +2-element Vector{Vector{String}}:
      + ["Hi", "my", "name", "is", "Sam", "."]
      + ["How", "are", "you", "today", "?"]
      +
      +mach = machine(tfidf_transformer, tokenized_docs)
      +fit!(mach)
      +
      +fitted_params(mach)
      +
      +tfidf_mat = transform(mach, tokenized_docs)

      Alternatively, one can provide documents pre-parsed as ngrams counts:

      using MLJ
      +import TextAnalysis
      +
      +docs = ["Hi my name is Sam.", "How are you today?"]
      +corpus = TextAnalysis.Corpus(TextAnalysis.NGramDocument.(docs, 1, 2))
      +ngram_docs = TextAnalysis.ngrams.(corpus)
      +
      +julia> ngram_docs[1]
      +Dict{AbstractString, Int64} with 11 entries:
      +  "is"      => 1
      +  "my"      => 1
      +  "name"    => 1
      +  "."       => 1
      +  "Hi"      => 1
      +  "Sam"     => 1
      +  "my name" => 1
      +  "Hi my"   => 1
      +  "name is" => 1
      +  "Sam ."   => 1
      +  "is Sam"  => 1
      +
      +tfidf_transformer = TfidfTransformer()
      +mach = machine(tfidf_transformer, ngram_docs)
      +MLJ.fit!(mach)
      +fitted_params(mach)
      +
      +tfidf_mat = transform(mach, ngram_docs)

      See also CountTransformer, BM25Transformer

      diff --git a/v0.20.3/models/TheilSenRegressor_MLJScikitLearnInterface/index.html b/v0.20.3/models/TheilSenRegressor_MLJScikitLearnInterface/index.html new file mode 100644 index 000000000..3a9404f6c --- /dev/null +++ b/v0.20.3/models/TheilSenRegressor_MLJScikitLearnInterface/index.html @@ -0,0 +1,2 @@ + +TheilSenRegressor · MLJ

      TheilSenRegressor

      TheilSenRegressor

      A model type for constructing a Theil-Sen regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      TheilSenRegressor = @load TheilSenRegressor pkg=MLJScikitLearnInterface

      Do model = TheilSenRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in TheilSenRegressor(fit_intercept=...).

      Hyper-parameters

      • fit_intercept = true
      • copy_X = true
      • max_subpopulation = 10000
      • n_subsamples = nothing
      • max_iter = 300
      • tol = 0.001
      • random_state = nothing
      • n_jobs = nothing
      • verbose = false
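
      A minimal sketch using MLJ's generic evaluation on synthetic data, assuming a working scikit-learn backend; the resampling strategy and measure are arbitrary choices:

      using MLJ
      +
      +TheilSenRegressor = @load TheilSenRegressor pkg=MLJScikitLearnInterface
      +model = TheilSenRegressor()
      +
      +X, y = make_regression(100, 3)   ## synthetic table and Continuous target
      +evaluate(model, X, y, resampling=CV(nfolds=5), measure=rms)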
      diff --git a/v0.20.3/models/TomekUndersampler_Imbalance/index.html b/v0.20.3/models/TomekUndersampler_Imbalance/index.html new file mode 100644 index 000000000..d4602ba7d --- /dev/null +++ b/v0.20.3/models/TomekUndersampler_Imbalance/index.html @@ -0,0 +1,28 @@ + +TomekUndersampler · MLJ

      TomekUndersampler

      Initiate a Tomek undersampling model with the given hyper-parameters.

      TomekUndersampler

      A model type for constructing a tomek undersampler, based on Imbalance.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      TomekUndersampler = @load TomekUndersampler pkg=Imbalance

      Do model = TomekUndersampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in TomekUndersampler(min_ratios=...).

      TomekUndersampler undersamples by removing any point that is part of a Tomek link in the data, as defined in: Ivan Tomek, "Two modifications of CNN", IEEE Trans. Systems, Man and Cybernetics, 6:769–772, 1976.

      Training data

      In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)

      There is no need to provide any data here because the model is a static transformer.

      Likewise, there is no need to fit!(mach).

      For default values of the hyper-parameters, model can be constructed by model = TomekUndersampler()

      Hyperparameters

      • min_ratios=1.0: A parameter that controls the maximum amount of undersampling to be done for each class. If this algorithm cleans the data to an extent that this is violated, some of the cleaned points will be revived randomly so that it is satisfied.

        • Can be a float, in which case each class will be undersampled to at most the size of the minority class times that float. By default, all classes are undersampled to the size of the minority class.
        • Can be a dictionary mapping each class label to the float minimum ratio for that class
      • force_min_ratios=false: If true, and this algorithm cleans the data such that the ratios for each class exceed those specified in min_ratios, then further undersampling will be performed so that the final ratios are equal to min_ratios.

      • rng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, MersenneTwister is used.

      • try_preserve_type::Bool=true: When true, the function will try to not change the type of the input table (e.g., DataFrame). However, for some tables, this may not succeed, and in this case, the table returned will be a column table (named-tuple of vectors). This parameter is ignored if the input is a matrix.

      Transform Inputs

      • X: A matrix or table of floats where each row is an observation from the dataset
      • y: An abstract vector of labels (e.g., strings) that correspond to the observations in X

      Transform Outputs

      • X_under: A matrix or table that includes the data after undersampling depending on whether the input X is a matrix or table respectively
      • y_under: An abstract vector of labels corresponding to X_under

      Operations

      • transform(mach, X, y): resample the data X and y using TomekUndersampler, returning both the new and original observations

      Example

      using MLJ
      +import Imbalance
      +
      +## set probability of each class
      +class_probs = [0.5, 0.2, 0.3]                         
      +num_rows, num_continuous_feats = 100, 5
      +## generate a table and categorical vector accordingly
      +X, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; 
      +                                min_sep=0.01, stds=[3.0 3.0 3.0], class_probs, rng=42)   
      +
      +julia> Imbalance.checkbalance(y; ref="minority")
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (173.7%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (252.6%) 
      +
      +## load TomekUndersampler model type:
      +TomekUndersampler = @load TomekUndersampler pkg=Imbalance
      +
      +## Undersample the majority classes to sizes relative to the minority class:
      +tomek_undersampler = TomekUndersampler(min_ratios=1.0, rng=42)
      +mach = machine(tomek_undersampler)
      +X_under, y_under = transform(mach, X, y)
      +
      +julia> Imbalance.checkbalance(y_under; ref="minority")
      +1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) 
      +2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 22 (115.8%) 
      +0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 36 (189.5%)
      diff --git a/v0.20.3/models/UnivariateBoxCoxTransformer_MLJModels/index.html b/v0.20.3/models/UnivariateBoxCoxTransformer_MLJModels/index.html new file mode 100644 index 000000000..909caa1ba --- /dev/null +++ b/v0.20.3/models/UnivariateBoxCoxTransformer_MLJModels/index.html @@ -0,0 +1,41 @@ + +UnivariateBoxCoxTransformer · MLJ

      UnivariateBoxCoxTransformer

      UnivariateBoxCoxTransformer

      A model type for constructing a single variable Box-Cox transformer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      UnivariateBoxCoxTransformer = @load UnivariateBoxCoxTransformer pkg=MLJModels

      Do model = UnivariateBoxCoxTransformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateBoxCoxTransformer(n=...).

      Box-Cox transformations attempt to make data look more normally distributed. This can improve performance and assist in the interpretation of models which suppose that data is generated by a normal distribution.

      A Box-Cox transformation (with shift) is of the form

      x -> ((x + c)^λ - 1)/λ

      for some constant c and real λ, unless λ = 0, in which case the above is replaced with

      x -> log(x + c)

      Given user-specified hyper-parameters n::Integer and shift::Bool, the present implementation learns the parameters c and λ from the training data as follows: If shift=true and zeros are encountered in the data, then c is set to 0.2 times the data mean. If there are no zeros, then no shift is applied. Finally, n different values of λ between -0.4 and 3 are considered, with λ fixed to the value maximizing normality of the transformed data.

      Reference: Wikipedia entry for power transform.
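
      For fixed c and λ, the transformation can be written as the following illustrative helper (not part of the MLJModels API):

      boxcox(x; c=0.0, λ=0.5) = λ == 0 ? log(x + c) : ((x + c)^λ - 1)/λ
      +
      +boxcox(4.0)             ## ((4.0)^0.5 - 1)/0.5 = 2.0
      +boxcox(4.0, λ=0.0)      ## log(4.0) ≈ 1.386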

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, x)

      where

      • x: any abstract vector with element scitype Continuous; check the scitype with scitype(x)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • n=171: number of values of the exponent λ to try
      • shift=false: whether to include a preliminary constant translation in transformations, in the presence of zeros

      Operations

      • transform(mach, xnew): apply the Box-Cox transformation learned when fitting mach
      • inverse_transform(mach, z): reconstruct the vector whose transformation, as learned by mach, is z

      Fitted parameters

      The fields of fitted_params(mach) are:

      • λ: the learned Box-Cox exponent
      • c: the learned shift

      Examples

      using MLJ
      +using UnicodePlots
      +using Random
      +Random.seed!(123)
      +
      +transf = UnivariateBoxCoxTransformer()
      +
      +x = randn(1000).^2
      +
      +mach = machine(transf, x)
      +fit!(mach)
      +
      +z = transform(mach, x)
      +
      +julia> histogram(x)
      +                ┌                                        ┐
      +   [ 0.0,  2.0) ┤███████████████████████████████████  848
      +   [ 2.0,  4.0) ┤████▌ 109
      +   [ 4.0,  6.0) ┤█▍ 33
      +   [ 6.0,  8.0) ┤▍ 7
      +   [ 8.0, 10.0) ┤▏ 2
      +   [10.0, 12.0) ┤  0
      +   [12.0, 14.0) ┤▏ 1
      +                └                                        ┘
      +                                 Frequency
      +
      +julia> histogram(z)
      +                ┌                                        ┐
      +   [-5.0, -4.0) ┤█▎ 8
      +   [-4.0, -3.0) ┤████████▊ 64
      +   [-3.0, -2.0) ┤█████████████████████▊ 159
      +   [-2.0, -1.0) ┤█████████████████████████████▊ 216
      +   [-1.0,  0.0) ┤███████████████████████████████████  254
      +   [ 0.0,  1.0) ┤█████████████████████████▊ 188
      +   [ 1.0,  2.0) ┤████████████▍ 90
      +   [ 2.0,  3.0) ┤██▊ 20
      +   [ 3.0,  4.0) ┤▎ 1
      +                └                                        ┘
      +                                 Frequency
      +
      diff --git a/v0.20.3/models/UnivariateDiscretizer_MLJModels/index.html b/v0.20.3/models/UnivariateDiscretizer_MLJModels/index.html new file mode 100644 index 000000000..605cde68d --- /dev/null +++ b/v0.20.3/models/UnivariateDiscretizer_MLJModels/index.html @@ -0,0 +1,33 @@ + +UnivariateDiscretizer · MLJ

      UnivariateDiscretizer

      UnivariateDiscretizer

      A model type for constructing a single variable discretizer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      UnivariateDiscretizer = @load UnivariateDiscretizer pkg=MLJModels

      Do model = UnivariateDiscretizer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateDiscretizer(n_classes=...).

      Discretization converts a Continuous vector into an OrderedFactor vector. In particular, the output is a CategoricalVector (whose reference type is optimized).

      The transformation is chosen so that the vector on which the transformer is fit has, in transformed form, an approximately uniform distribution of values. Specifically, if n_classes is the level of discretization, then 2*n_classes - 1 ordered quantiles are computed, the odd quantiles being used for transforming (discretization) and the even quantiles for inverse transforming.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, x)

      where

      • x: any abstract vector with Continuous element scitype; check scitype with scitype(x).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • n_classes: number of discrete classes in the output

      Operations

      • transform(mach, xnew): discretize xnew according to the discretization learned when fitting mach
      • inverse_transform(mach, z): attempt to reconstruct from z a vector that transforms to give z

      Fitted parameters

      The fields of fitted_params(mach).fitresult include:

      • odd_quantiles: quantiles used for transforming (length is n_classes - 1)
      • even_quantiles: quantiles used for inverse transforming (length is n_classes)

      Example

      using MLJ
      +using Random
      +Random.seed!(123)
      +
      +discretizer = UnivariateDiscretizer(n_classes=100)
      +mach = machine(discretizer, randn(1000))
      +fit!(mach)
      +
      +julia> x = rand(5)
      +5-element Vector{Float64}:
      + 0.8585244609846809
      + 0.37541692370451396
      + 0.6767070590395461
      + 0.9208844241267105
      + 0.7064611415680901
      +
      +julia> z = transform(mach, x)
      +5-element CategoricalArrays.CategoricalArray{UInt8,1,UInt8}:
      + 0x52
      + 0x42
      + 0x4d
      + 0x54
      + 0x4e
      +
      +x_approx = inverse_transform(mach, z)
      +julia> x - x_approx
      +5-element Vector{Float64}:
      + 0.008224506144777322
      + 0.012731354778359405
      + 0.0056265330571125816
      + 0.005738175684445124
      + 0.006835652575801987
      diff --git a/v0.20.3/models/UnivariateFillImputer_MLJModels/index.html b/v0.20.3/models/UnivariateFillImputer_MLJModels/index.html new file mode 100644 index 000000000..420012a54 --- /dev/null +++ b/v0.20.3/models/UnivariateFillImputer_MLJModels/index.html @@ -0,0 +1,37 @@ + +UnivariateFillImputer · MLJ

      UnivariateFillImputer

      UnivariateFillImputer

      A model type for constructing a single variable fill imputer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      UnivariateFillImputer = @load UnivariateFillImputer pkg=MLJModels

      Do model = UnivariateFillImputer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateFillImputer(continuous_fill=...).

      Use this model to impute missing values in a vector with a fixed value learned from the non-missing values of the training vector.

      For imputing missing values in tabular data, use FillImputer instead.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, x)

      where

      • x: any abstract vector with element scitype Union{Missing, T} where T is a subtype of Continuous, Multiclass, OrderedFactor or Count; check scitype using scitype(x)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • continuous_fill: function or other callable to determine value to be imputed in the case of Continuous (abstract float) data; default is to apply median after skipping missing values
      • count_fill: function or other callable to determine value to be imputed in the case of Count (integer) data; default is to apply rounded median after skipping missing values
      • finite_fill: function or other callable to determine value to be imputed in the case of Multiclass or OrderedFactor data (categorical vectors); default is to apply mode after skipping missing values

      Operations

      • transform(mach, xnew): return xnew with missing values imputed with the fill values learned when fitting mach

      Fitted parameters

      The fields of fitted_params(mach) are:

      • filler: the fill value to be imputed in all new data

      Examples

      using MLJ
      +imputer = UnivariateFillImputer()
      +
      +x_continuous = [1.0, 2.0, missing, 3.0]
      +x_multiclass = coerce(["y", "n", "y", missing, "y"], Multiclass)
      +x_count = [1, 1, 1, 2, missing, 3, 3]
      +
      +mach = machine(imputer, x_continuous)
      +fit!(mach)
      +
      +julia> fitted_params(mach)
      +(filler = 2.0,)
      +
      +julia> transform(mach, [missing, missing, 101.0])
      +3-element Vector{Float64}:
      + 2.0
      + 2.0
      + 101.0
      +
      +mach2 = machine(imputer, x_multiclass) |> fit!
      +
      +julia> transform(mach2, x_multiclass)
      +5-element CategoricalArray{String,1,UInt32}:
      + "y"
      + "n"
      + "y"
      + "y"
      + "y"
      +
      +mach3 = machine(imputer, x_count) |> fit!
      +
      +julia> transform(mach3, [missing, missing, 5])
      +3-element Vector{Int64}:
      + 2
      + 2
      + 5

      For imputing tabular data, use FillImputer.

      diff --git a/v0.20.3/models/UnivariateStandardizer_MLJModels/index.html b/v0.20.3/models/UnivariateStandardizer_MLJModels/index.html new file mode 100644 index 000000000..da5843a36 --- /dev/null +++ b/v0.20.3/models/UnivariateStandardizer_MLJModels/index.html @@ -0,0 +1,2 @@ + +UnivariateStandardizer · MLJ diff --git a/v0.20.3/models/UnivariateTimeTypeToContinuous_MLJModels/index.html b/v0.20.3/models/UnivariateTimeTypeToContinuous_MLJModels/index.html new file mode 100644 index 000000000..b661769ab --- /dev/null +++ b/v0.20.3/models/UnivariateTimeTypeToContinuous_MLJModels/index.html @@ -0,0 +1,18 @@ + +UnivariateTimeTypeToContinuous · MLJ

      UnivariateTimeTypeToContinuous

      UnivariateTimeTypeToContinuous

      A model type for constructing a single variable transformer that creates continuous representations of temporally typed data, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      UnivariateTimeTypeToContinuous = @load UnivariateTimeTypeToContinuous pkg=MLJModels

      Do model = UnivariateTimeTypeToContinuous() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateTimeTypeToContinuous(zero_time=...).

      Use this model to convert vectors with a TimeType element type to vectors of Float64 type (Continuous element scitype).

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, x)

      where

      • x: any abstract vector whose element type is a subtype of Dates.TimeType

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • zero_time: the time that is to correspond to 0.0 under transformations, with the type coinciding with the training data element type. If unspecified, the earliest time encountered in training is used.
      • step::Period=Hour(24): time interval to correspond to one unit under transformation

      Operations

      • transform(mach, xnew): apply the encoding inferred when mach was fit

      Fitted parameters

      fitted_params(mach).fitresult is the tuple (zero_time, step) actually used in transformations, which may differ from the user-specified hyper-parameters.

      Example

      using MLJ
      +using Dates
      +
      +x = [Date(2001, 1, 1) + Day(i) for i in 0:4]
      +
      +encoder = UnivariateTimeTypeToContinuous(zero_time=Date(2000, 1, 1),
      +                                         step=Week(1))
      +
      +mach = machine(encoder, x)
      +fit!(mach)
      +julia> transform(mach, x)
      +5-element Vector{Float64}:
      + 52.285714285714285
      + 52.42857142857143
      + 52.57142857142857
      + 52.714285714285715
      + 52.857142
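
As noted under "Fitted parameters" above, the zero_time and step actually used can be inspected after training. A minimal sketch (output not shown, as the display may differ):

fitted_params(mach).fitresult  # the (zero_time, step) pair actually used in transformations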
      diff --git a/v0.20.3/models/XGBoostClassifier_XGBoost/index.html b/v0.20.3/models/XGBoostClassifier_XGBoost/index.html new file mode 100644 index 000000000..f437f3682 --- /dev/null +++ b/v0.20.3/models/XGBoostClassifier_XGBoost/index.html @@ -0,0 +1,2 @@ + +XGBoostClassifier · MLJ

      XGBoostClassifier

      XGBoostClassifier

A model type for constructing an eXtreme Gradient Boosting Classifier, based on XGBoost.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      XGBoostClassifier = @load XGBoostClassifier pkg=XGBoost

      Do model = XGBoostClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in XGBoostClassifier(test=...).

      Univariate classification using xgboost.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      m = machine(model, X, y)

      where

      • X: any table of input features, either an AbstractMatrix or Tables.jl-compatible table.
• y: the target, an AbstractVector with Finite element scitype.

      Train using fit!(m, rows=...).

      Hyper-parameters

      See https://xgboost.readthedocs.io/en/stable/parameter.html.
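
The docstring gives no usage example, so here is a minimal sketch, not taken from the original documentation. It assumes the iris data and that max_depth and eta are exposed as keyword hyper-parameters mirroring the XGBoost parameters linked above:

using MLJ
XGBoostClassifier = @load XGBoostClassifier pkg=XGBoost
X, y = @load_iris                                 # table of Continuous features, Multiclass target
model = XGBoostClassifier(max_depth=3, eta=0.1)   # hyper-parameter names assumed from the XGBoost docs
mach = machine(model, X, y) |> fit!
yhat = predict(mach, X)                           # probabilistic predictions
predict_mode(mach, X)                             # point predictions (class labels)
log_loss(yhat, y)                                 # training loss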

      diff --git a/v0.20.3/models/XGBoostCount_XGBoost/index.html b/v0.20.3/models/XGBoostCount_XGBoost/index.html new file mode 100644 index 000000000..7752b7368 --- /dev/null +++ b/v0.20.3/models/XGBoostCount_XGBoost/index.html @@ -0,0 +1,2 @@ + +XGBoostCount · MLJ

      XGBoostCount

      XGBoostCount

A model type for constructing an eXtreme Gradient Boosting Count Regressor, based on XGBoost.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      XGBoostCount = @load XGBoostCount pkg=XGBoost

      Do model = XGBoostCount() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in XGBoostCount(test=...).

      Univariate discrete regression using xgboost.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      m = machine(model, X, y)

      where

      • X: any table of input features, either an AbstractMatrix or Tables.jl-compatible table.
• y: the target, an AbstractVector with Count element scitype.

      Train using fit!(m, rows=...).

      Hyper-parameters

      See https://xgboost.readthedocs.io/en/stable/parameter.html.

      diff --git a/v0.20.3/models/XGBoostRegressor_XGBoost/index.html b/v0.20.3/models/XGBoostRegressor_XGBoost/index.html new file mode 100644 index 000000000..8b4e44024 --- /dev/null +++ b/v0.20.3/models/XGBoostRegressor_XGBoost/index.html @@ -0,0 +1,2 @@ + +XGBoostRegressor · MLJ

      XGBoostRegressor

      XGBoostRegressor

A model type for constructing an eXtreme Gradient Boosting Regressor, based on XGBoost.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      XGBoostRegressor = @load XGBoostRegressor pkg=XGBoost

      Do model = XGBoostRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in XGBoostRegressor(test=...).

      Univariate continuous regression using xgboost.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      m = machine(model, X, y)

      where

      • X: any table of input features whose columns have Continuous element scitype; check column scitypes with schema(X).
• y: the target, an AbstractVector with Continuous element scitype; check the scitype with scitype(y).

      Train using fit!(m, rows=...).

      Hyper-parameters

      See https://xgboost.readthedocs.io/en/stable/parameter.html.
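
As with the classifier, here is a minimal usage sketch, not taken from the original documentation; max_depth and eta are assumed to mirror the XGBoost parameters linked above:

using MLJ
XGBoostRegressor = @load XGBoostRegressor pkg=XGBoost
X, y = make_regression(100, 3)                    # synthetic Continuous features and target
model = XGBoostRegressor(max_depth=4, eta=0.1)    # hyper-parameter names assumed from the XGBoost docs
mach = machine(model, X, y) |> fit!
yhat = predict(mach, X)                           # point predictions
l2(yhat, y)                                       # mean squared error on the training data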

      diff --git a/v0.20.3/models/dummy_file b/v0.20.3/models/dummy_file new file mode 100644 index 000000000..eecca4f8a --- /dev/null +++ b/v0.20.3/models/dummy_file @@ -0,0 +1 @@ +This file exists so that the directory that contains it exists in clones of the original repo. In that way files can be written to it CI. \ No newline at end of file diff --git a/v0.20.3/modifying_behavior/index.html b/v0.20.3/modifying_behavior/index.html new file mode 100644 index 000000000..f038363ba --- /dev/null +++ b/v0.20.3/modifying_behavior/index.html @@ -0,0 +1,4 @@ + +Modifying Behavior · MLJ

      Modifying Behavior

To modify the behavior of MLJ you will need to clone the relevant component package (e.g., MLJBase.jl), or a fork thereof, and modify your local Julia environment to use your local clone in place of the official release. For example, you might proceed as follows:

      using Pkg
+Pkg.activate("my_MLJ_env", shared=true)
+Pkg.develop(path="path/to/my/local/MLJBase")

      To test your local clone, do

      Pkg.test("MLJBase")

      For more on package management, see here.

      diff --git a/v0.20.3/more_on_probabilistic_predictors/index.html b/v0.20.3/more_on_probabilistic_predictors/index.html new file mode 100644 index 000000000..db20b579f --- /dev/null +++ b/v0.20.3/more_on_probabilistic_predictors/index.html @@ -0,0 +1,26 @@ + +More on Probabilistic Predictors · MLJ

      More on Probabilistic Predictors

      Although one can call predict_mode on a probabilistic binary classifier to get deterministic predictions, a more flexible strategy is to wrap the model using BinaryThresholdPredictor, as this allows the user to specify the threshold probability for predicting a positive class. This wrapping converts a probabilistic classifier into a deterministic one.

      The positive class is always the second class returned when calling levels on the training target y.

      MLJModels.BinaryThresholdPredictorType
      BinaryThresholdPredictor(model; threshold=0.5)

      Wrap the Probabilistic model, model, assumed to support binary classification, as a Deterministic model, by applying the specified threshold to the positive class probability. In addition to conventional supervised classifiers, it can also be applied to outlier detection models that predict normalized scores - in the form of appropriate UnivariateFinite distributions - that is, models that subtype AbstractProbabilisticUnsupervisedDetector or AbstractProbabilisticSupervisedDetector.

      By convention the positive class is the second class returned by levels(y), where y is the target.

      If threshold=0.5 then calling predict on the wrapped model is equivalent to calling predict_mode on the atomic model.

      Example

Below is an application to the well-known Pima Indian diabetes dataset, including optimization of the threshold parameter, with a high balanced accuracy as the objective. The target class distribution is 268 positives to 500 negatives.

      Loading the data:

      using MLJ, Random
      +rng = Xoshiro(123)
      +
      +diabetes = OpenML.load(43582)
      +outcome, X = unpack(diabetes, ==(:Outcome), rng=rng);
      +y = coerce(Int.(outcome), OrderedFactor);

      Choosing a probabilistic classifier:

      EvoTreesClassifier = @load EvoTreesClassifier
      +prob_predictor = EvoTreesClassifier()

Wrapping in BinaryThresholdPredictor to get a deterministic classifier with threshold as a new hyper-parameter:

      point_predictor = BinaryThresholdPredictor(prob_predictor, threshold=0.6)
      +Xnew, _ = make_moons(3, rng=rng)
      +mach = machine(point_predictor, X, y) |> fit!
      +predict(mach, X)[1:3] # [0, 0, 0]

      Estimating performance:

      balanced = BalancedAccuracy(adjusted=true)
      +e = evaluate!(mach, resampling=CV(nfolds=6), measures=[balanced, accuracy])
      +e.measurement[1] # 0.405 ± 0.089

      Wrapping in tuning strategy to learn threshold that maximizes balanced accuracy:

      r = range(point_predictor, :threshold, lower=0.1, upper=0.9)
      +tuned_point_predictor = TunedModel(
      +    point_predictor,
      +    tuning=RandomSearch(rng=rng),
      +    resampling=CV(nfolds=6),
      +    range = r,
      +    measure=balanced,
      +    n=30,
      +)
      +mach2 = machine(tuned_point_predictor, X, y) |> fit!
      +optimized_point_predictor = report(mach2).best_model
      +optimized_point_predictor.threshold # 0.260
      +predict(mach2, X)[1:3] # [1, 1, 0]

      Estimating the performance of the auto-thresholding model (nested resampling here):

      e = evaluate!(mach2, resampling=CV(nfolds=6), measure=[balanced, accuracy])
      +e.measurement[1] # 0.477 ± 0.110
      source
diff --git a/v0.20.3/objects.inv b/v0.20.3/objects.inv new file mode 100644 index 0000000000000000000000000000000000000000..d7ea011b1982731707318a221f7a02621d48f731 GIT binary patch literal 11326 (binary data omitted) +OpenML Integration · MLJ diff --git a/v0.20.3/performance_measures/index.html b/v0.20.3/performance_measures/index.html new file mode 100644 index 000000000..9375021d0 --- /dev/null +++ b/v0.20.3/performance_measures/index.html @@ -0,0 +1,8 @@ + +Performance Measures · MLJ

      Performance Measures

      Introduction

In MLJ, loss functions, scoring rules, confusion matrices, sensitivities, etc., are collectively referred to as measures. These measures are provided by the package StatisticalMeasures.jl but are immediately available to the MLJ user. Here's a simple example of direct application of the log_loss measure to compute a training loss:

      using MLJ
      +X, y = @load_iris
      +DecisionTreeClassifier = @load DecisionTreeClassifier pkg=DecisionTree
      +tree = DecisionTreeClassifier(max_depth=2)
      +mach = machine(tree, X, y) |> fit!
      +yhat = predict(mach, X)
      +log_loss(yhat, y)
      0.143176310291424

      For more examples of direct measure usage, see the StatisticalMeasures.jl tutorial.

      A list of all measures, ready to use after running using MLJ or using StatisticalMeasures, is here. Alternatively, call measures() (experimental) to generate a dictionary keyed on available measure constructors, with measure metadata as values.

      Custom measures

      Any measure-like object with appropriate calling behavior can be used with MLJ. To quickly build custom measures, we recommend using the package StatisticalMeasuresBase.jl, which provides this tutorial. Note, in particular, that an "atomic" measure can be transformed into a multi-target measure using this package.
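
For instance, assuming (as stated above) that any object with the calling behavior measure(ŷ, y) can serve as a measure, a plain function can be passed directly to evaluate. The following is a sketch, not an excerpt from the manual; max_abs_error is a hypothetical custom measure and the DecisionTree package is assumed to be installed:

using MLJ
max_abs_error(yhat, y) = maximum(abs.(yhat .- y))   # hypothetical custom measure
X, y = make_regression(100, 2)
DecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree
evaluate(DecisionTreeRegressor(), X, y, resampling=CV(nfolds=3), measure=max_abs_error)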

      Uses of measures

In MLJ, measures are specified when evaluating model performance using evaluate!/evaluate (see Evaluating Model Performance), when wrapping models using TunedModel (see Tuning Models), when wrapping iterative models using IteratedModel (see Controlling Iterative Models), and elsewhere.

      Using LossFunctions.jl

      In previous versions of MLJ, measures from LossFunctions.jl were also available. Now measures from that package must be explicitly imported and wrapped, as described here.

      Receiver operator characteristics

      A related performance evaluation tool provided by StatisticalMeasures.jl, and hence by MLJ, is the roc_curve method:

      StatisticalMeasures.roc_curveFunction
      roc_curve(ŷ, y) -> false_positive_rates, true_positive_rates, thresholds

      Return data for plotting the receiver operator characteristic (ROC curve) for a binary classification problem.

Here ŷ is a vector of UnivariateFinite distributions (from CategoricalDistributions.jl) over the two values taken by the ground truth observations y, a CategoricalVector.

If there are k unique probabilities, then there are correspondingly k thresholds and k+1 "bins" over which the false positive and true positive rates are constant:

      • [0.0 - thresholds[1]]
      • [thresholds[1] - thresholds[2]]
      • ...
      • [thresholds[k] - 1]

      Consequently, true_positive_rates and false_positive_rates have length k+1 if thresholds has length k.

      To plot the curve using your favorite plotting backend, do something like plot(false_positive_rates, true_positive_rates).

      Core algorithm: Functions.roc_curve

      See also AreaUnderCurve.

      source
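
As a concrete illustration (not part of the docstring above), the following sketch computes ROC data for a probabilistic classifier trained on a synthetic binary problem; the DecisionTree package is an assumption:

using MLJ
X, y = make_moons(200, rng=123)                   # binary classification data
DecisionTreeClassifier = @load DecisionTreeClassifier pkg=DecisionTree
mach = machine(DecisionTreeClassifier(), X, y) |> fit!
yhat = predict(mach, X)                           # vector of UnivariateFinite distributions
fprs, tprs, ts = roc_curve(yhat, y)
# plot(fprs, tprs) with your preferred plotting package to visualize the curve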

      Migration guide for changes to measures in MLJBase 1.0

Prior to MLJBase.jl 1.0 (respectively, MLJ.jl version 0.19.6) measures were defined in MLJBase.jl (a dependency of MLJ.jl), but now they are provided by the MLJ.jl dependency StatisticalMeasures.jl. Effects on users are detailed below:

      Breaking behavior likely relevant to many users

      • If using MLJBase without MLJ, then, in Julia 1.9 or higher, StatisticalMeasures must be explicitly imported to use measures that were previously part of MLJBase. If using MLJ, then all previous measures are still available, with the exception of those corresponding to LossFunctions.jl (see below).

• All measures return a single aggregated measurement. In other words, measures previously reporting a measurement per observation (previously subtyping Unaggregated) no longer do so. To get per-observation measurements, use the new method StatisticalMeasures.measurements(measure, ŷ, y[, weights, class_weights]); a minimal sketch follows this list.

      • The default measure for regression models (used in evaluate/evaluate! when measures is unspecified) is changed from rms to l2=LPLoss(2) (mean sum of squares).

      • MeanAbsoluteError has been removed and instead mae is an alias for LPLoss(p=1).

      • Measures that previously skipped NaN values will now (at least by default) propagate those values. Missing value behavior is unchanged, except some measures that previously did not support missing now do.

      • Aliases for measure types have been removed. For example RMSE (alias for RootMeanSquaredError) is gone. Aliases for instances, such as rms and cross_entropy persist. The exception is precision, for which ppv can be used in its place. (This is to avoid conflict with Base.precision, which was previously pirated.)

• info(measure) has been decommissioned; query docstrings or access the new measure traits individually instead. These traits are now provided by StatisticalMeasures.jl and are not exported. For example, to access the orientation of the measure rms, do import StatisticalMeasures as SM; SM.orientation(rms).

      • Behavior of the measures() method, to list all measures and associated traits, has changed. It now returns a dictionary instead of a vector of named tuples; measures(predicate) is decommissioned, but measures(needle) is preserved. (This method, owned by StatisticalMeasures.jl, has some other search options, but is experimental.)

      • Measures that were wraps of losses from LossFunctions.jl are no longer exposed by MLJBase or MLJ. To use such a loss, you must explicitly import LossFunctions and wrap the loss appropriately. See Using losses from LossFunctions.jl for examples.

• Some user-defined measures working in previous versions of MLJBase.jl may not work without modification, as they must conform to the new StatisticalMeasuresBase.jl API. See this tutorial on how to define new measures.

      • Measures with a "feature argument" X, as in some_measure(ŷ, y, X), are no longer supported. See What is a measure? for allowed signatures in measures.
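
To illustrate the aggregation point above (second bullet), here is a minimal sketch; the numerical values follow from the arithmetic rather than being quoted output:

using MLJ
import StatisticalMeasures
yhat = [1.0, 2.0, 4.0]
y = [1.0, 2.0, 3.0]
l2(yhat, y)                                       # single aggregated measurement: (0 + 0 + 1)/3 ≈ 0.33
StatisticalMeasures.measurements(l2, yhat, y)     # per-observation measurements: [0.0, 0.0, 1.0]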

      Packages implementing the MLJ model interface

The migration of measures is not expected to require any changes to the source code in packages providing implementations of the MLJ model interface (MLJModelInterface.jl), such as MLJDecisionTreeInterface.jl and MLJFlux.jl, and this is confirmed by extensive integration tests. However, some current tests will fail if they use MLJBase measures. The following should generally suffice to adapt such tests:

      • Add StatisticalMeasures as test dependency, and add using StatisticalMeasures to your runtests.jl (and/or included submodules).

      • If measures are qualified, as in MLJBase.rms, then the qualification must be removed or changed to StatisticalMeasures.rms, etc.

      • Be aware that the default measure used in methods such as evaluate!, when measure is not specified, is changed from rms to l2 for regression models.

• Be aware that all measures now report a single aggregated measurement, and never one measurement per observation. See the second point above.

      Breaking behavior possibly relevant to some developers

      • The abstract measure types Aggregated, Unaggregated, Measure have been decommissioned. (A measure is now defined purely by its calling behavior.)

      • What were previously exported as measure types are now only constructors.

      • target_scitype(measure) is decommissioned. Related is StatisticalMeasures.observation_scitype(measure) which declares an upper bound on the allowed scitype of a single observation.

      • prediction_type(measure) is decommissioned. Instead use StatisticalMeasures.kind_of_proxy(measure).

      • The trait reports_each_observation is decommissioned. Related is StatisticalMeasures.can_report_unaggregated; if false the new measurements method simply returns n copies of the aggregated measurement, where n is the number of observations provided, instead of individual observation-dependent measurements.

      • aggregation(measure) has been decommissioned. Instead use StatisticalMeasures.external_mode_of_aggregation(measure).

      • instances(measure) has been decommissioned; query docstrings for measure aliases, or follow this example: aliases = measures()[RootMeanSquaredError].aliases.

• is_feature_dependent(measure) has been decommissioned. Measures consuming feature data are no longer supported; see above.

      • distribution_type(measure) has been decommissioned.

      • docstring(measure) has been decommissioned.

      • Behavior of aggregate has changed.

• The following traits, previously exported by MLJBase and MLJ, cannot be applied to measures: supports_weights, supports_class_weights, orientation, human_name. Instead use the traits with these names provided by StatisticalMeasures.jl (they will need to be qualified, as in import StatisticalMeasures; StatisticalMeasures.orientation(measure)).

      diff --git a/v0.20.3/preparing_data/index.html b/v0.20.3/preparing_data/index.html new file mode 100644 index 000000000..386cbca3f --- /dev/null +++ b/v0.20.3/preparing_data/index.html @@ -0,0 +1,112 @@ + +Preparing Data · MLJ

      Preparing Data

      Splitting data

      MLJ has two tools for splitting data. To split data vertically (that is, to split by observations) use partition. This is commonly applied to a vector of observation indices, but can also be applied to datasets themselves, provided they are vectors, matrices or tables.

      To split tabular data horizontally (i.e., break up a table based on feature names) use unpack.

      MLJBase.partitionFunction
      partition(X, fractions...;
      +          shuffle=nothing,
      +          rng=Random.GLOBAL_RNG,
      +          stratify=nothing,
      +          multi=false)

Splits the vector, matrix or table X into a tuple of objects of the same type, whose vertical concatenation is X. The number of rows in each component of the return value is determined by the corresponding fractions of nrows(X), where valid fractions are floats between 0 and 1 whose sum is less than one. The last fraction is not provided, as it is inferred from the preceding ones.

      For "synchronized" partitioning of multiple objects, use the multi=true option described below.

      julia> partition(1:1000, 0.8)
      +([1,...,800], [801,...,1000])
      +
      +julia> partition(1:1000, 0.2, 0.7)
      +([1,...,200], [201,...,900], [901,...,1000])
      +
      +julia> partition(reshape(1:10, 5, 2), 0.2, 0.4)
      +([1 6], [2 7; 3 8], [4 9; 5 10])
      +
      +X, y = make_blobs() # a table and vector
      +Xtrain, Xtest = partition(X, 0.8, stratify=y)
      +
      +(Xtrain, Xtest), (ytrain, ytest) = partition((X, y), 0.8, rng=123, multi=true)

      Keywords

      • shuffle=nothing: if set to true, shuffles the rows before taking fractions.

• rng=Random.GLOBAL_RNG: specifies the random number generator to be used; can be an integer seed. If rng is specified and shuffle === nothing, then shuffle is interpreted as true.

      • stratify=nothing: if a vector is specified, the partition will match the stratification of the given vector. In that case, shuffle cannot be false.

      • multi=false: if true then X is expected to be a tuple of objects sharing a common length, which are each partitioned separately using the same specified fractions and the same row shuffling. Returns a tuple of partitions (a tuple of tuples).

      source
      MLJBase.unpackFunction
      unpack(table, f1, f2, ... fk;
      +       wrap_singles=false,
      +       shuffle=false,
      +       rng::Union{AbstractRNG,Int,Nothing}=nothing,
      +       coerce_options...)

      Horizontally split any Tables.jl compatible table into smaller tables or vectors by making column selections determined by the predicates f1, f2, ..., fk. Selection from the column names is without replacement. A predicate is any object f such that f(name) is true or false for each column name::Symbol of table.

      Returns a tuple of tables/vectors with length one greater than the number of supplied predicates, with the last component including all previously unselected columns.

      julia> table = DataFrame(x=[1,2], y=['a', 'b'], z=[10.0, 20.0], w=["A", "B"])
      +2×4 DataFrame
      + Row │ x      y     z        w
      +     │ Int64  Char  Float64  String
      +─────┼──────────────────────────────
      +   1 │     1  a        10.0  A
      +   2 │     2  b        20.0  B
      +
      +Z, XY, W = unpack(table, ==(:z), !=(:w))
      +julia> Z
      +2-element Vector{Float64}:
      + 10.0
      + 20.0
      +
      +julia> XY
      +2×2 DataFrame
      + Row │ x      y
      +     │ Int64  Char
      +─────┼─────────────
      +   1 │     1  a
      +   2 │     2  b
      +
      +julia> W  # the column(s) left over
      +2-element Vector{String}:
      + "A"
      + "B"

      Whenever a returned table contains a single column, it is converted to a vector unless wrap_singles=true.

      If coerce_options are specified then table is first replaced with coerce(table, coerce_options). See ScientificTypes.coerce for details.

      If shuffle=true then the rows of table are first shuffled, using the global RNG, unless rng is specified; if rng is an integer, it specifies the seed of an automatically generated Mersenne twister. If rng is specified then shuffle=true is implicit.

      source

      Bridging the gap between data type and model requirements

      As outlined in Getting Started, it is important that the scientific type of data matches the requirements of the model of interest. For example, while the majority of supervised learning models require input features to be Continuous, newcomers to MLJ are sometimes surprised at the disappointing results of model queries such as this one:

      X = (height   = [185, 153, 163, 114, 180],
      +     time     = [2.3, 4.5, 4.2, 1.8, 7.1],
      +     mark     = ["D", "A", "C", "B", "A"],
      +     admitted = ["yes", "no", missing, "yes"]);
      +y = [12.4, 12.5, 12.0, 31.9, 43.0]
      +models(matching(X, y))
      4-element Vector{NamedTuple{(:name, :package_name, :is_supervised, :abstract_type, :deep_properties, :docstring, :fit_data_scitype, :human_name, :hyperparameter_ranges, :hyperparameter_types, :hyperparameters, :implemented_methods, :inverse_transform_scitype, :is_pure_julia, :is_wrapper, :iteration_parameter, :load_path, :package_license, :package_url, :package_uuid, :predict_scitype, :prediction_type, :reporting_operations, :reports_feature_importances, :supports_class_weights, :supports_online, :supports_training_losses, :supports_weights, :transform_scitype, :input_scitype, :target_scitype, :output_scitype)}}:
      + (name = ConstantRegressor, package_name = MLJModels, ... )
      + (name = DecisionTreeRegressor, package_name = BetaML, ... )
      + (name = DeterministicConstantRegressor, package_name = MLJModels, ... )
      + (name = RandomForestRegressor, package_name = BetaML, ... )

      Or are unsure about the source of the following warning:

      Tree = @load DecisionTreeRegressor pkg=DecisionTree verbosity=0
      +tree = Tree();
+julia> machine(tree, X, y)
      +┌ Warning: The scitype of `X`, in `machine(model, X, ...)` is incompatible with `model=DecisionTreeRegressor @378`:                                                                
      +│ scitype(X) = Table{Union{AbstractVector{Continuous}, AbstractVector{Count}, AbstractVector{Textual}, AbstractVector{Union{Missing, Textual}}}}
      +│ input_scitype(model) = Table{var"#s46"} where var"#s46"<:Union{AbstractVector{var"#s9"} where var"#s9"<:Continuous, AbstractVector{var"#s9"} where var"#s9"<:Count, AbstractVector{var"#s9"} where var"#s9"<:OrderedFactor}.
      +└ @ MLJBase ~/Dropbox/Julia7/MLJ/MLJBase/src/machines.jl:103
      +Machine{DecisionTreeRegressor,…} @198 trained 0 times; caches data
      +  args: 
      +    1:  Source @628 ⏎ `Table{Union{AbstractVector{Continuous}, AbstractVector{Count}, AbstractVector{Textual}, AbstractVector{Union{Missing, Textual}}}}`
      +    2:  Source @544 ⏎ `AbstractVector{Continuous}`

      The meaning of the warning is:

• The input X is a table with column scitypes Continuous, Count, Textual, and Union{Missing, Textual}, which we can also see by inspecting the schema:
      schema(X)
      ┌──────────┬─────────────────────────┬────────────────────────┐
      +│ names    │ scitypes                │ types                  │
      +├──────────┼─────────────────────────┼────────────────────────┤
      +│ height   │ Count                   │ Int64                  │
      +│ time     │ Continuous              │ Float64                │
      +│ mark     │ Textual                 │ String                 │
      +│ admitted │ Union{Missing, Textual} │ Union{Missing, String} │
      +└──────────┴─────────────────────────┴────────────────────────┘
      +
• The model requires a table whose column element scitypes subtype Continuous, Count, or OrderedFactor (see input_scitype in the warning); the Textual columns are therefore an incompatibility.

      Common data preprocessing workflows

      There are two tools for addressing data-model type mismatches like the above, with links to further documentation given below:

      Scientific type coercion: We coerce machine types to obtain the intended scientific interpretation. If height in the above example is intended to be Continuous, mark is supposed to be OrderedFactor, and admitted a (binary) Multiclass, then we can do

      X_coerced = coerce(X, :height=>Continuous, :mark=>OrderedFactor, :admitted=>Multiclass);
      +schema(X_coerced)
      ┌──────────┬───────────────────────────────┬────────────────────────────────────
      +│ names    │ scitypes                      │ types                             ⋯
      +├──────────┼───────────────────────────────┼────────────────────────────────────
      +│ height   │ Continuous                    │ Float64                           ⋯
      +│ time     │ Continuous                    │ Float64                           ⋯
      +│ mark     │ OrderedFactor{4}              │ CategoricalValue{String, UInt32}  ⋯
      +│ admitted │ Union{Missing, Multiclass{2}} │ Union{Missing, CategoricalValue{S ⋯
      +└──────────┴───────────────────────────────┴────────────────────────────────────
      +                                                                1 column omitted
      +

      Data transformations: We carry out conventional data transformations, such as missing value imputation and feature encoding:

      imputer = FillImputer()
      +mach = machine(imputer, X_coerced) |> fit!
      +X_imputed = transform(mach, X_coerced);
      +schema(X_imputed)
      ┌──────────┬──────────────────┬──────────────────────────────────┐
      +│ names    │ scitypes         │ types                            │
      +├──────────┼──────────────────┼──────────────────────────────────┤
      +│ height   │ Continuous       │ Float64                          │
      +│ time     │ Continuous       │ Float64                          │
      +│ mark     │ OrderedFactor{4} │ CategoricalValue{String, UInt32} │
      +│ admitted │ Multiclass{2}    │ CategoricalValue{String, UInt32} │
      +└──────────┴──────────────────┴──────────────────────────────────┘
      +
      encoder = ContinuousEncoder()
      +mach = machine(encoder, X_imputed) |> fit!
      +X_encoded = transform(mach, X_imputed)
      (height = [185.0, 153.0, 163.0, 114.0, 180.0],
      + time = [2.3, 4.5, 4.2, 1.8, 7.1],
      + mark = [4.0, 1.0, 3.0, 2.0, 1.0],
      + admitted__no = [0.0, 1.0, 0.0, 0.0],
      + admitted__yes = [1.0, 0.0, 1.0, 1.0],)
      schema(X_encoded)
      ┌───────────────┬────────────┬─────────┐
      +│ names         │ scitypes   │ types   │
      +├───────────────┼────────────┼─────────┤
      +│ height        │ Continuous │ Float64 │
      +│ time          │ Continuous │ Float64 │
      +│ mark          │ Continuous │ Float64 │
      +│ admitted__no  │ Continuous │ Float64 │
      +│ admitted__yes │ Continuous │ Float64 │
      +└───────────────┴────────────┴─────────┘
      +

      Such transformations can also be combined in a pipeline; see Linear Pipelines.
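
For example, the imputation and encoding steps above can be composed into a single pipeline using the |> syntax for models (a minimal sketch, re-using X_coerced from above):

pipe = FillImputer() |> ContinuousEncoder()
mach = machine(pipe, X_coerced) |> fit!
X_ready = transform(mach, X_coerced)   # imputed and continuously encoded in one step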

      Scientific type coercion

Scientific type coercion is documented in detail at ScientificTypesBase.jl. See also the tutorial at this MLJ Workshop (specifically, here) and this Data Science in Julia tutorial.

      Also relevant is the section, Working with Categorical Data.

      Data transformation

MLJ's built-in transformers are documented at Transformers and Other Unsupervised Models. The most relevant in the present context are: ContinuousEncoder, OneHotEncoder, FeatureSelector and FillImputer. A Gaussian mixture model imputer is provided by BetaML, which can be loaded with

      MissingImputator = @load MissingImputator pkg=BetaML

      This MLJ Workshop, and the "End-to-end examples" in Data Science in Julia tutorials give further illustrations of data preprocessing in MLJ.

      diff --git a/v0.20.3/quick_start_guide_to_adding_models/index.html b/v0.20.3/quick_start_guide_to_adding_models/index.html new file mode 100644 index 000000000..f09a596a3 --- /dev/null +++ b/v0.20.3/quick_start_guide_to_adding_models/index.html @@ -0,0 +1,2 @@ + +Quick-Start Guide to Adding Models · MLJ diff --git a/v0.20.3/search_index.js b/v0.20.3/search_index.js new file mode 100644 index 000000000..5a3e33db0 --- /dev/null +++ b/v0.20.3/search_index.js @@ -0,0 +1,3 @@ +var documenterSearchIndex = {"docs": +[{"location":"models/LDA_MultivariateStats/#LDA_MultivariateStats","page":"LDA","title":"LDA","text":"","category":"section"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"LDA","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"A model type for constructing a linear discriminant analysis model, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"LDA = @load LDA pkg=MultivariateStats","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"Do model = LDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LDA(method=...).","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"Multiclass linear discriminant analysis learns a projection in a space of features to a lower dimensional space, in a way that attempts to preserve as much as possible the degree to which the classes of a discrete target variable can be discriminated. This can be used either for dimension reduction of the features (see transform below) or for probabilistic classification of the target (see predict below).","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"In the case of prediction, the class probability for a new observation reflects the proximity of that observation to training observations associated with that class, and how far away the observation is from observations associated with other classes. Specifically, the distances, in the transformed (projected) space, of a new observation, from the centroid of each target class, is computed; the resulting vector of distances, multiplied by minus one, is passed to a softmax function to obtain a class probability prediction. 
Here \"distance\" is computed using a user-specified distance function.","category":"page"},{"location":"models/LDA_MultivariateStats/#Training-data","page":"LDA","title":"Training data","text":"","category":"section"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"Here:","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LDA_MultivariateStats/#Hyper-parameters","page":"LDA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"method::Symbol=:gevd: The solver, one of :gevd or :whiten methods.\ncov_w::StatsBase.SimpleCovariance(): An estimator for the within-class covariance (used in computing the within-class scatter matrix, Sw). Any robust estimator from CovarianceEstimation.jl can be used.\ncov_b::StatsBase.SimpleCovariance(): The same as cov_w but for the between-class covariance (used in computing the between-class scatter matrix, Sb).\noutdim::Int=0: The output dimension, i.e dimension of the transformed space, automatically set to min(indim, nclasses-1) if equal to 0.\nregcoef::Float64=1e-6: The regularization coefficient. A positive value regcoef*eigmax(Sw) where Sw is the within-class scatter matrix, is added to the diagonal of Sw to improve numerical stability. This can be useful if using the standard covariance estimator.\ndist=Distances.SqEuclidean(): The distance metric to use when performing classification (to compare the distance between a new point and centroids in the transformed space); must be a subtype of Distances.SemiMetric from Distances.jl, e.g., Distances.CosineDist.","category":"page"},{"location":"models/LDA_MultivariateStats/#Operations","page":"LDA","title":"Operations","text":"","category":"section"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.\npredict(mach, Xnew): Return predictions of the target given features Xnew having the same scitype as X above. 
Predictions are probabilistic but uncalibrated.\npredict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.","category":"page"},{"location":"models/LDA_MultivariateStats/#Fitted-parameters","page":"LDA","title":"Fitted parameters","text":"","category":"section"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"classes: The classes seen during model fitting.\nprojection_matrix: The learned projection matrix, of size (indim, outdim), where indim and outdim are the input and output dimensions respectively (See Report section below).","category":"page"},{"location":"models/LDA_MultivariateStats/#Report","page":"LDA","title":"Report","text":"","category":"section"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"The fields of report(mach) are:","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"indim: The dimension of the input space i.e the number of training features.\noutdim: The dimension of the transformed space the model is projected to.\nmean: The mean of the untransformed training data. A vector of length indim.\nnclasses: The number of classes directly observed in the training data (which can be less than the total number of classes in the class pool).\nclass_means: The class-specific means of the training data. A matrix of size (indim, nclasses) with the ith column being the class-mean of the ith class in classes (See fitted params section above).\nclass_weights: The weights (class counts) of each class. A vector of length nclasses with the ith element being the class weight of the ith class in classes. (See fitted params section above.)\nSb: The between class scatter matrix.\nSw: The within class scatter matrix.","category":"page"},{"location":"models/LDA_MultivariateStats/#Examples","page":"LDA","title":"Examples","text":"","category":"section"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"using MLJ\n\nLDA = @load LDA pkg=MultivariateStats\n\nX, y = @load_iris ## a table and a vector\n\nmodel = LDA()\nmach = machine(model, X, y) |> fit!\n\nXproj = transform(mach, X)\ny_hat = predict(mach, X)\nlabels = predict_mode(mach, X)\n","category":"page"},{"location":"models/LDA_MultivariateStats/","page":"LDA","title":"LDA","text":"See also BayesianLDA, SubspaceLDA, BayesianSubspaceLDA","category":"page"},{"location":"models/NuSVC_LIBSVM/#NuSVC_LIBSVM","page":"NuSVC","title":"NuSVC","text":"","category":"section"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"NuSVC","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"A model type for constructing a ν-support vector classifier, based on LIBSVM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"NuSVC = @load NuSVC pkg=LIBSVM","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"Do model = NuSVC() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in NuSVC(kernel=...).","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"This model is a re-parameterization of the SVC classifier, where nu replaces cost, and is mathematically equivalent to it. The parameter nu allows more direct control over the number of support vectors (see under \"Hyper-parameters\").","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"This model always predicts actual class labels. For probabilistic predictions, use instead ProbabilisticNuSVC.","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): \"LIBSVM: a library for support vector machines.\" ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf. ","category":"page"},{"location":"models/NuSVC_LIBSVM/#Training-data","page":"NuSVC","title":"Training data","text":"","category":"section"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"In MLJ or MLJBase, bind an instance model to data with:","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"where","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/NuSVC_LIBSVM/#Hyper-parameters","page":"NuSVC","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see \"Examples\" below).\nLIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2\nLIBSVM.Kernel.Polynomial: (x1, x2) -> gamma*x1'*x2 + coef0)^degree\nLIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))\nLIBSVM.Kernel.Sigmoid: (x1, x2) - > tanh(gamma*x1'*x2 + coef0)\nHere gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIVSVM.jl issue91\ngamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).\ncoef0 = 0.0: kernel parameter (see above)\ndegree::Int32 = Int32(3): degree in polynomial kernel (see above)\nnu=0.5 (range (0, 1]): An upper bound on the fraction of margin errors and a lower bound of the fraction of support vectors. Denoted ν in the cited paper. 
Changing nu changes the thickness of the margin (a neighborhood of the decision surface) and a margin error is said to have occurred if a training observation lies on the wrong side of the surface or within the margin.\ncachesize=200.0 cache memory size in MB\ntolerance=0.001: tolerance for the stopping criterion\nshrinking=true: whether to use shrinking heuristics","category":"page"},{"location":"models/NuSVC_LIBSVM/#Operations","page":"NuSVC","title":"Operations","text":"","category":"section"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/NuSVC_LIBSVM/#Fitted-parameters","page":"NuSVC","title":"Fitted parameters","text":"","category":"section"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"libsvm_model: the trained model object created by the LIBSVM.jl package\nencoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation","category":"page"},{"location":"models/NuSVC_LIBSVM/#Report","page":"NuSVC","title":"Report","text":"","category":"section"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"The fields of report(mach) are:","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"gamma: actual value of the kernel parameter gamma used in training","category":"page"},{"location":"models/NuSVC_LIBSVM/#Examples","page":"NuSVC","title":"Examples","text":"","category":"section"},{"location":"models/NuSVC_LIBSVM/#Using-a-built-in-kernel","page":"NuSVC","title":"Using a built-in kernel","text":"","category":"section"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"using MLJ\nimport LIBSVM\n\nNuSVC = @load NuSVC pkg=LIBSVM ## model type\nmodel = NuSVC(kernel=LIBSVM.Kernel.Polynomial) ## instance\n\nX, y = @load_iris ## table, vector\nmach = machine(model, X, y) |> fit!\n\nXnew = (sepal_length = [6.4, 7.2, 7.4],\n sepal_width = [2.8, 3.0, 2.8],\n petal_length = [5.6, 5.8, 6.1],\n petal_width = [2.1, 1.6, 1.9],)\n\njulia> yhat = predict(mach, Xnew)\n3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:\n \"virginica\"\n \"virginica\"\n \"virginica\"","category":"page"},{"location":"models/NuSVC_LIBSVM/#User-defined-kernels","page":"NuSVC","title":"User-defined kernels","text":"","category":"section"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`\nmodel = NuSVC(kernel=k)\nmach = machine(model, X, y) |> fit!\n\njulia> yhat = predict(mach, Xnew)\n3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:\n \"virginica\"\n \"virginica\"\n \"virginica\"","category":"page"},{"location":"models/NuSVC_LIBSVM/","page":"NuSVC","title":"NuSVC","text":"See also the classifiers SVC and LinearSVC, LIVSVM.jl and the original C implementation. 
documentation.","category":"page"},{"location":"models/KMedoidsClusterer_BetaML/#KMedoidsClusterer_BetaML","page":"KMedoidsClusterer","title":"KMedoidsClusterer","text":"","category":"section"},{"location":"models/KMedoidsClusterer_BetaML/","page":"KMedoidsClusterer","title":"KMedoidsClusterer","text":"mutable struct KMedoidsClusterer <: MLJModelInterface.Unsupervised","category":"page"},{"location":"models/KMedoidsClusterer_BetaML/#Parameters:","page":"KMedoidsClusterer","title":"Parameters:","text":"","category":"section"},{"location":"models/KMedoidsClusterer_BetaML/","page":"KMedoidsClusterer","title":"KMedoidsClusterer","text":"n_classes::Int64: Number of classes to discriminate the data [def: 3]\ndist::Function: Function to employ as distance. Default to the Euclidean distance. Can be one of the predefined distances (l1_distance, l2_distance, l2squared_distance), cosine_distance), any user defined function accepting two vectors and returning a scalar or an anonymous function with the same characteristics.\ninitialisation_strategy::String: The computation method of the vector of the initial representatives. One of the following:\n\"random\": randomly in the X space\n\"grid\": using a grid approach\n\"shuffle\": selecting randomly within the available points [default]\n\"given\": using a provided set of initial representatives provided in the initial_representatives parameter\ninitial_representatives::Union{Nothing, Matrix{Float64}}: Provided (K x D) matrix of initial representatives (useful only with initialisation_strategy=\"given\") [default: nothing]\nrng::Random.AbstractRNG: Random Number Generator [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/KMedoidsClusterer_BetaML/","page":"KMedoidsClusterer","title":"KMedoidsClusterer","text":"The K-medoids clustering algorithm with customisable distance function, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/KMedoidsClusterer_BetaML/","page":"KMedoidsClusterer","title":"KMedoidsClusterer","text":"Similar to K-Means, but the \"representatives\" (the cetroids) are guaranteed to be one of the training points. 
The algorithm works with any arbitrary distance measure.","category":"page"},{"location":"models/KMedoidsClusterer_BetaML/#Notes:","page":"KMedoidsClusterer","title":"Notes:","text":"","category":"section"},{"location":"models/KMedoidsClusterer_BetaML/","page":"KMedoidsClusterer","title":"KMedoidsClusterer","text":"data must be numerical\nonline fitting (re-fitting with new data) is supported","category":"page"},{"location":"models/KMedoidsClusterer_BetaML/#Example:","page":"KMedoidsClusterer","title":"Example:","text":"","category":"section"},{"location":"models/KMedoidsClusterer_BetaML/","page":"KMedoidsClusterer","title":"KMedoidsClusterer","text":"julia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load KMedoidsClusterer pkg = \"BetaML\" verbosity=0\nBetaML.Clustering.KMedoidsClusterer\n\njulia> model = modelType()\nKMedoidsClusterer(\n n_classes = 3, \n dist = BetaML.Clustering.var\"#39#41\"(), \n initialisation_strategy = \"shuffle\", \n initial_representatives = nothing, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X);\n\njulia> fit!(mach);\n[ Info: Training machine(KMedoidsClusterer(n_classes = 3, …), …).\n\njulia> classes_est = predict(mach, X);\n\njulia> hcat(y,classes_est)\n150×2 CategoricalArrays.CategoricalArray{Union{Int64, String},2,UInt32}:\n \"setosa\" 3\n \"setosa\" 3\n \"setosa\" 3\n ⋮ \n \"virginica\" 1\n \"virginica\" 1\n \"virginica\" 2","category":"page"},{"location":"benchmarking/#Benchmarking","page":"Benchmarking","title":"Benchmarking","text":"","category":"section"},{"location":"benchmarking/","page":"Benchmarking","title":"Benchmarking","text":"This feature is not yet available.","category":"page"},{"location":"benchmarking/","page":"Benchmarking","title":"Benchmarking","text":"CONTRIBUTE.md","category":"page"},{"location":"weights/#Weights","page":"Weights","title":"Weights","text":"","category":"section"},{"location":"weights/","page":"Weights","title":"Weights","text":"In machine learning it is possible to assign each observation an independent significance, or weight, either in training or in performance evaluation, or both. ","category":"page"},{"location":"weights/","page":"Weights","title":"Weights","text":"There are two kinds of weights in use in MLJ:","category":"page"},{"location":"weights/","page":"Weights","title":"Weights","text":"per observation weights (also just called weights) refer to weight vectors of the same length as the number of observations\nclass weights refer to dictionaries keyed on the target classes (levels) for use in classification problems","category":"page"},{"location":"weights/#Specifying-weights-in-training","page":"Weights","title":"Specifying weights in training","text":"","category":"section"},{"location":"weights/","page":"Weights","title":"Weights","text":"To specify weights in training you bind the weights to the model along with the data when constructing a machine. For supervised models the weights are specified last:","category":"page"},{"location":"weights/","page":"Weights","title":"Weights","text":"KNNRegressor = @load KNNRegressor\nmodel = KNNRegressor()\nX, y = make_regression(10, 3)\nw = rand(length(y))\n\nmach = machine(model, X, y, w) |> fit!","category":"page"},{"location":"weights/","page":"Weights","title":"Weights","text":"Note that a model supports per observation weights if supports_weights(model) is true (see the minimal check below). 
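For example, a minimal, self-contained check mirroring the KNNRegressor example above. The commented values are the outcomes one would expect for the registered KNNRegressor implementation, not captured output:

```julia
using MLJ

KNNRegressor = @load KNNRegressor verbosity=0
model = KNNRegressor()

supports_weights(model)        ## expected: true  (per observation weights accepted in training)
supports_class_weights(model)  ## expected: false (class weights apply to classifiers, not regressors)
```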
To list all such models, do","category":"page"},{"location":"weights/","page":"Weights","title":"Weights","text":"models() do m\n m.supports_weights\nend","category":"page"},{"location":"weights/","page":"Weights","title":"Weights","text":"A model supports class weights if supports_class_weights(model) is true.","category":"page"},{"location":"weights/#Specifying-weights-in-performance-evaluation","page":"Weights","title":"Specifying weights in performance evaluation","text":"","category":"section"},{"location":"weights/","page":"Weights","title":"Weights","text":"When calling a measure (metric) that supports weights, provide the weights as the last argument, as in","category":"page"},{"location":"weights/","page":"Weights","title":"Weights","text":"_, y = @load_iris\nŷ = shuffle(y)\nw = Dict(\"versicolor\" => 1, \"setosa\" => 2, \"virginica\"=> 3)\nmacro_f1score(ŷ, y, w)","category":"page"},{"location":"weights/","page":"Weights","title":"Weights","text":"Some measures also support specification of a class weight dictionary. For details see the StatisticalMeasures.jl tutorial.","category":"page"},{"location":"weights/","page":"Weights","title":"Weights","text":"To pass weights to all the measures listed in an evaluate!/evaluate call, use the keyword specifiers weights=... or class_weights=.... For details, see Evaluating Model Performance.","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/#NeuralNetworkClassifier_MLJFlux","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"NeuralNetworkClassifier","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"A model type for constructing a neural network classifier, based on MLJFlux.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"NeuralNetworkClassifier = @load NeuralNetworkClassifier pkg=MLJFlux","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"Do model = NeuralNetworkClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in NeuralNetworkClassifier(builder=...).","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"NeuralNetworkClassifier is for training a data-dependent Flux.jl neural network for making probabilistic predictions of a Multiclass or OrderedFactor target, given a table of Continuous features. Users provide a recipe for constructing the network, based on properties of the data that is encountered, by specifying an appropriate builder (a minimal sketch is given below). 
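Here is the promised sketch of specifying a non-default builder. It assumes the MLJFlux.MLP builder and its hidden keyword; the layer sizes and epoch count are illustrative only, not recommendations:

```julia
using MLJ
import MLJFlux

NeuralNetworkClassifier = @load NeuralNetworkClassifier pkg=MLJFlux verbosity=0

## swap the default builder for a multi-layer perceptron with two hidden layers:
clf = NeuralNetworkClassifier(builder=MLJFlux.MLP(hidden=(16, 8)), epochs=20)

X, y = @load_iris
mach = machine(clf, X, y) |> fit!
yhat = predict(mach, X)  ## probabilistic predictions
```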
See MLJFlux documentation for more on builders.","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/#Training-data","page":"NeuralNetworkClassifier","title":"Training data","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"Here:","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"X is either a Matrix or any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). If X is a Matrix, it is assumed to have columns corresponding to features and rows corresponding to observations.\ny is the target, which can be any AbstractVector whose element scitype is Multiclass or OrderedFactor; check the scitype with scitype(y)","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/#Hyper-parameters","page":"NeuralNetworkClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"builder=MLJFlux.Short(): An MLJFlux builder that constructs a neural network. Possible builders include: MLJFlux.Linear, MLJFlux.Short, and MLJFlux.MLP. See MLJFlux.jl documentation for examples of user-defined builders. See also finaliser below.\noptimiser::Flux.Adam(): A Flux.Optimise optimiser. The optimiser performs the updating of the weights of the network. For further reference, see the Flux optimiser documentation. To choose a learning rate (the update rate of the optimizer), a good rule of thumb is to start out at 10e-3, and tune using powers of 10 between 1 and 1e-7.\nloss=Flux.crossentropy: The loss function which the network will optimize. Should be a function which can be called in the form loss(yhat, y). Possible loss functions are listed in the Flux loss function documentation. For a classification task, the most natural loss functions are:\nFlux.crossentropy: Standard multiclass classification loss, also known as the log loss.\nFlux.logitcrossentropy: Mathematically equal to crossentropy, but numerically more stable than finalising the outputs with softmax and then calculating crossentropy. You will need to specify finaliser=identity to remove MLJFlux's default softmax finaliser, and understand that the output of predict is then unnormalized (no longer probabilistic).\nFlux.tversky_loss: Used with imbalanced data to give more weight to false negatives.\nFlux.focal_loss: Used with highly imbalanced data. Weights harder examples more than easier examples.\nCurrently, MLJ measures are not supported as values of loss.\nepochs::Int=10: The duration of training, in epochs. 
Typically, one epoch represents one pass through the complete training dataset.\nbatch_size::Int=1: the batch size to be used for training, representing the number of samples per update of the network weights. Typically, batch size is between 8 and 512. Increasing batch size may accelerate training if acceleration=CUDALibs() and a GPU is available.\nlambda::Float64=0: The strength of the weight regularization penalty. Can be any value in the range [0, ∞).\nalpha::Float64=0: The L2/L1 mix of regularization, in the range [0, 1]. A value of 0 represents L2 regularization, and a value of 1 represents L1 regularization.\nrng::Union{AbstractRNG, Int64}: The random number generator or seed used during training.\noptimizer_changes_trigger_retraining::Bool=false: Defines what happens when re-fitting a machine if the associated optimiser has changed. If true, the associated machine will retrain from scratch on fit! call, otherwise it will not.\nacceleration::AbstractResource=CPU1(): Defines on what hardware training is done. For training on a GPU, use CUDALibs().\nfinaliser=Flux.softmax: The final activation function of the neural network (applied after the network defined by builder). Defaults to Flux.softmax.","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/#Operations","page":"NeuralNetworkClassifier","title":"Operations","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above. Predictions are probabilistic but uncalibrated.\npredict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/#Fitted-parameters","page":"NeuralNetworkClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"chain: The trained \"chain\" (Flux.jl model), namely the series of layers, functions, and activations which make up the neural network. This includes the final layer specified by finaliser (eg, softmax).","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/#Report","page":"NeuralNetworkClassifier","title":"Report","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"The fields of report(mach) are:","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"training_losses: A vector of training losses (penalised if lambda != 0) in historical order, of length epochs + 1. The first element is the pre-training loss.","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/#Examples","page":"NeuralNetworkClassifier","title":"Examples","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"In this example we build a classification model using the Iris dataset. 
This is a very basic example, using a default builder and no standardization. For a more advanced illustration, see NeuralNetworkRegressor or ImageClassifier, and examples in the MLJFlux.jl documentation.","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"using MLJ\nusing Flux\nimport RDatasets","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"First, we can load the data:","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"iris = RDatasets.dataset(\"datasets\", \"iris\");\ny, X = unpack(iris, ==(:Species), rng=123); ## a vector and a table\nNeuralNetworkClassifier = @load NeuralNetworkClassifier pkg=MLJFlux\nclf = NeuralNetworkClassifier()","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"Next, we can train the model:","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"mach = machine(clf, X, y)\nfit!(mach)","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"We can train the model in an incremental fashion, altering the learning rate as we go, provided optimizer_changes_trigger_retraining is false (the default). Here, we also change the number of (total) iterations:","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"clf.optimiser.eta = clf.optimiser.eta * 2\nclf.epochs = clf.epochs + 5\n\nfit!(mach, verbosity=2) ## trains 5 more epochs","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"We can inspect the mean training loss using the cross_entropy function:","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"training_loss = cross_entropy(predict(mach, X), y) |> mean","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"And we can access the Flux chain (model) using fitted_params:","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"chain = fitted_params(mach).chain","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"Finally, we can see how the out-of-sample performance changes over time, using MLJ's learning_curve function:","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"r = range(clf, :epochs, lower=1, upper=200, scale=:log10)\ncurve = learning_curve(clf, X, y,\n range=r,\n resampling=Holdout(fraction_train=0.7),\n measure=cross_entropy)\nusing Plots\nplot(curve.parameter_values,\n curve.measurements,\n xlab=curve.parameter_name,\n xscale=curve.parameter_scale,\n ylab = \"Cross 
Entropy\")\n","category":"page"},{"location":"models/NeuralNetworkClassifier_MLJFlux/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"See also ImageClassifier.","category":"page"},{"location":"models/HBOSDetector_OutlierDetectionPython/#HBOSDetector_OutlierDetectionPython","page":"HBOSDetector","title":"HBOSDetector","text":"","category":"section"},{"location":"models/HBOSDetector_OutlierDetectionPython/","page":"HBOSDetector","title":"HBOSDetector","text":"HBOSDetector(n_bins = 10,\n alpha = 0.1,\n tol = 0.5)","category":"page"},{"location":"models/HBOSDetector_OutlierDetectionPython/","page":"HBOSDetector","title":"HBOSDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.hbos","category":"page"},{"location":"models/DBSCAN_Clustering/#DBSCAN_Clustering","page":"DBSCAN","title":"DBSCAN","text":"","category":"section"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"DBSCAN","category":"page"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"A model type for constructing a DBSCAN clusterer (density-based spatial clustering of applications with noise), based on Clustering.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"DBSCAN = @load DBSCAN pkg=Clustering","category":"page"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"Do model = DBSCAN() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in DBSCAN(radius=...).","category":"page"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"DBSCAN is a clustering algorithm that groups together points that are closely packed together (points with many nearby neighbors), marking as outliers points that lie alone in low-density regions (whose nearest neighbors are too far away). More information is available at the Clustering.jl documentation. Use predict to get cluster assignments. Point types - core, boundary or noise - are accessed from the machine report (see below).","category":"page"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"This is a static implementation, i.e., it does not generalize to new data instances, and there is no training data. 
For clusterers that do generalize, see KMeans or KMedoids.","category":"page"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"In MLJ or MLJBase, create a machine with","category":"page"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"mach = machine(model)","category":"page"},{"location":"models/DBSCAN_Clustering/#Hyper-parameters","page":"DBSCAN","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"radius=1.0: query radius.\nleafsize=20: number of points binned in each leaf node of the nearest neighbor k-d tree.\nmin_neighbors=1: minimum number of a core point neighbors.\nmin_cluster_size=1: minimum number of points in a valid cluster.","category":"page"},{"location":"models/DBSCAN_Clustering/#Operations","page":"DBSCAN","title":"Operations","text":"","category":"section"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"predict(mach, X): return cluster label assignments, as an unordered CategoricalVector. Here X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). Note that points of type noise will always get a label of 0.","category":"page"},{"location":"models/DBSCAN_Clustering/#Report","page":"DBSCAN","title":"Report","text":"","category":"section"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"After calling predict(mach), the fields of report(mach) are:","category":"page"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"point_types: A CategoricalVector with the DBSCAN point type classification, one element per row of X. Elements are either 'C' (core), 'B' (boundary), or 'N' (noise).\nnclusters: The number of clusters (excluding the noise \"cluster\")\ncluster_labels: The unique list of cluster labels\nclusters: A vector of Clustering.DbscanCluster objects from Clustering.jl, which have these fields:\nsize: number of points in a cluster (core + boundary)\ncore_indices: indices of points in the cluster core\nboundary_indices: indices of points on the cluster boundary","category":"page"},{"location":"models/DBSCAN_Clustering/#Examples","page":"DBSCAN","title":"Examples","text":"","category":"section"},{"location":"models/DBSCAN_Clustering/","page":"DBSCAN","title":"DBSCAN","text":"using MLJ\n\nX, labels = make_moons(400, noise=0.09, rng=1) ## synthetic data with 2 clusters; X\ny = map(labels) do label\n label == 0 ? \"cookie\" : \"monster\"\nend;\ny = coerce(y, Multiclass);\n\nDBSCAN = @load DBSCAN pkg=Clustering\nmodel = DBSCAN(radius=0.13, min_cluster_size=5)\nmach = machine(model)\n\n## compute and output cluster assignments for observations in `X`:\nyhat = predict(mach, X)\n\n## get DBSCAN point types:\nreport(mach).point_types\nreport(mach).nclusters\n\n## compare cluster labels with actual labels:\ncompare = zip(yhat, y) |> collect;\ncompare[1:10] ## clusters align with classes\n\n## visualize clusters, noise in red:\npoints = zip(X.x1, X.x2) |> collect\ncolors = map(yhat) do i\n i == 0 ? :red :\n i == 1 ? :blue :\n i == 2 ? :green :\n i == 3 ? 
:yellow :\n :black\nend\nusing Plots\nscatter(points, color=colors)","category":"page"},{"location":"glossary/#Glossary","page":"Glossary","title":"Glossary","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Note: This glossary includes some detail intended mainly for MLJ developers.","category":"page"},{"location":"glossary/#Basics","page":"Glossary","title":"Basics","text":"","category":"section"},{"location":"glossary/#hyperparameters","page":"Glossary","title":"hyperparameters","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Parameters on which some learning algorithm depends, specified before the algorithm is applied, and where learning is interpreted in the broadest sense. For example, PCA feature reduction is a \"preprocessing\" transformation \"learning\" a projection from training data, governed by a dimension hyperparameter. Hyperparameters in our sense may specify configuration (eg, number of parallel processes) even when this does not affect the end-product of learning. (But we exclude verbosity level.)","category":"page"},{"location":"glossary/#model-(object-of-abstract-type-Model)","page":"Glossary","title":"model (object of abstract type Model)","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Object collecting together hyperpameters of a single algorithm. Models are classified either as supervised or unsupervised models (eg, \"transformers\"), with corresponding subtypes Supervised <: Model and Unsupervised <: Model.","category":"page"},{"location":"glossary/#fitresult-(type-generally-defined-outside-of-MLJ)","page":"Glossary","title":"fitresult (type generally defined outside of MLJ)","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Also known as \"learned\" or \"fitted\" parameters, these are \"weights\", \"coefficients\", or similar parameters learned by an algorithm, after adopting the prescribed hyper-parameters. For example, decision trees of a random forest, the coefficients and intercept of a linear model, or the projection matrices of a PCA dimension-reduction algorithm.","category":"page"},{"location":"glossary/#operation","page":"Glossary","title":"operation","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Data-manipulating operations (methods) using some fitresult. For supervised learners, the predict, predict_mean, predict_median, or predict_mode methods; for transformers, the transform or inverse_transform method. An operation may also refer to an ordinary data-manipulating method that does not depend on a fit-result (e.g., a broadcasted logarithm) which is then called static operation for clarity. 
An operation that is not static is dynamic.","category":"page"},{"location":"glossary/#machine-(object-of-type-Machine)","page":"Glossary","title":"machine (object of type Machine)","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"An object consisting of:","category":"page"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"(1) A model","category":"page"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"(2) A fit-result (undefined until training)","category":"page"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"(3) Training arguments (one for each data argument of the model's associated fit method). A training argument is data used for training (subsampled by specifying rows=... in fit!) but also in evaluation (subsampled by specifying rows=... in predict, predict_mean, etc). Generally, there are two training arguments for supervised models, and just one for unsupervised models. Each argument is either a Source node, wrapping concrete data supplied to the machine constructor, or a Node, in the case of a learning network (see below). Both kinds of nodes can be called with an optional rows=... keyword argument to (lazily) return concrete data.","category":"page"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"In addition, machines store \"report\" metadata, for recording algorithm-specific statistics of training (eg, an internal estimate of generalization error, feature importances); and they cache information allowing the fit-result to be updated without repeating unnecessary information.","category":"page"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Machines are trained by calls to a fit! method which may be passed an optional argument specifying the rows of data to be used in training.","category":"page"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"For more, see the Machines section.","category":"page"},{"location":"glossary/#Learning-Networks-and-Composite-Models","page":"Glossary","title":"Learning Networks and Composite Models","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Note: Multiple machines in a learning network may share the same model, and multiple learning nodes may share the same machine.","category":"page"},{"location":"glossary/#source-node-(object-of-type-Source)","page":"Glossary","title":"source node (object of type Source)","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"A container for training data and point of entry for new data in a learning network (see below).","category":"page"},{"location":"glossary/#node-(object-of-type-Node)","page":"Glossary","title":"node (object of type Node)","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Essentially a machine (whose arguments are possibly other nodes) wrapped in an associated operation (e.g., predict or inverse_transform). It consists primarily of:","category":"page"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"An operation, static or dynamic.\nA machine, or nothing if the operation is static.\nUpstream connections to other nodes, specified by a list of arguments (one for each argument of the operation). 
These are the arguments on which the operation \"acts\" when the node N is called, as in N().","category":"page"},{"location":"glossary/#learning-network","page":"Glossary","title":"learning network","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"A directed acyclic graph implicit in the connections of a collection of source(s) and nodes. ","category":"page"},{"location":"glossary/#wrapper","page":"Glossary","title":"wrapper","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Any model with one or more other models as hyper-parameters.","category":"page"},{"location":"glossary/#composite-model","page":"Glossary","title":"composite model","text":"","category":"section"},{"location":"glossary/","page":"Glossary","title":"Glossary","text":"Any wrapper, or any learning network, \"exported\" as a model (see Composing Models).","category":"page"},{"location":"models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/#ProbabilisticSGDClassifier_MLJScikitLearnInterface","page":"ProbabilisticSGDClassifier","title":"ProbabilisticSGDClassifier","text":"","category":"section"},{"location":"models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/","page":"ProbabilisticSGDClassifier","title":"ProbabilisticSGDClassifier","text":"ProbabilisticSGDClassifier","category":"page"},{"location":"models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/","page":"ProbabilisticSGDClassifier","title":"ProbabilisticSGDClassifier","text":"A model type for constructing a probabilistic sgd classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/","page":"ProbabilisticSGDClassifier","title":"ProbabilisticSGDClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/","page":"ProbabilisticSGDClassifier","title":"ProbabilisticSGDClassifier","text":"ProbabilisticSGDClassifier = @load ProbabilisticSGDClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/","page":"ProbabilisticSGDClassifier","title":"ProbabilisticSGDClassifier","text":"Do model = ProbabilisticSGDClassifier() to construct an instance with default hyper-parameters. 
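For instance, a minimal usage sketch following the generic MLJ workflow. It assumes the usual probabilistic classifier behaviour (predict returning distributions) and is not taken from the package's own examples:

```julia
using MLJ

ProbabilisticSGDClassifier = @load ProbabilisticSGDClassifier pkg=MLJScikitLearnInterface verbosity=0
model = ProbabilisticSGDClassifier()

X, y = @load_iris
mach = machine(model, X, y) |> fit!

yhat = predict(mach, X)  ## probabilistic predictions
predict_mode(mach, X)    ## most likely classes
```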
Provide keyword arguments to override hyper-parameter defaults, as in ProbabilisticSGDClassifier(loss=...).","category":"page"},{"location":"models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"ProbabilisticSGDClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ProbabilisticSGDClassifier_MLJScikitLearnInterface/","page":"ProbabilisticSGDClassifier","title":"ProbabilisticSGDClassifier","text":"loss = log_loss\npenalty = l2\nalpha = 0.0001\nl1_ratio = 0.15\nfit_intercept = true\nmax_iter = 1000\ntol = 0.001\nshuffle = true\nverbose = 0\nepsilon = 0.1\nn_jobs = nothing\nrandom_state = nothing\nlearning_rate = optimal\neta0 = 0.0\npower_t = 0.5\nearly_stopping = false\nvalidation_fraction = 0.1\nn_iter_no_change = 5\nclass_weight = nothing\nwarm_start = false\naverage = false","category":"page"},{"location":"models/HuberRegressor_MLJScikitLearnInterface/#HuberRegressor_MLJScikitLearnInterface","page":"HuberRegressor","title":"HuberRegressor","text":"","category":"section"},{"location":"models/HuberRegressor_MLJScikitLearnInterface/","page":"HuberRegressor","title":"HuberRegressor","text":"HuberRegressor","category":"page"},{"location":"models/HuberRegressor_MLJScikitLearnInterface/","page":"HuberRegressor","title":"HuberRegressor","text":"A model type for constructing a Huber regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/HuberRegressor_MLJScikitLearnInterface/","page":"HuberRegressor","title":"HuberRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/HuberRegressor_MLJScikitLearnInterface/","page":"HuberRegressor","title":"HuberRegressor","text":"HuberRegressor = @load HuberRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/HuberRegressor_MLJScikitLearnInterface/","page":"HuberRegressor","title":"HuberRegressor","text":"Do model = HuberRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in HuberRegressor(epsilon=...).","category":"page"},{"location":"models/HuberRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"HuberRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/HuberRegressor_MLJScikitLearnInterface/","page":"HuberRegressor","title":"HuberRegressor","text":"epsilon = 1.35\nmax_iter = 100\nalpha = 0.0001\nwarm_start = false\nfit_intercept = true\ntol = 1.0e-5","category":"page"},{"location":"models/KPLSRegressor_PartialLeastSquaresRegressor/#KPLSRegressor_PartialLeastSquaresRegressor","page":"KPLSRegressor","title":"KPLSRegressor","text":"","category":"section"},{"location":"models/KPLSRegressor_PartialLeastSquaresRegressor/","page":"KPLSRegressor","title":"KPLSRegressor","text":"A Kernel Partial Least Squares Regressor. A Kernel PLS2 NIPALS algorithms. 
Can be used mainly for regression.","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/#EpsilonSVR_LIBSVM","page":"EpsilonSVR","title":"EpsilonSVR","text":"","category":"section"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"EpsilonSVR","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"A model type for constructing a ϵ-support vector regressor, based on LIBSVM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"EpsilonSVR = @load EpsilonSVR pkg=LIBSVM","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"Do model = EpsilonSVR() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EpsilonSVR(kernel=...).","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): \"LIBSVM: a library for support vector machines.\" ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf. ","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"This model is an adaptation of the classifier SVC to regression, but has an additional parameter epsilon (denoted ϵ in the cited reference).","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/#Training-data","page":"EpsilonSVR","title":"Training data","text":"","category":"section"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"In MLJ or MLJBase, bind an instance model to data with:","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"where","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/#Hyper-parameters","page":"EpsilonSVR","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. 
Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see \"Examples\" below).\nLIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2\nLIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree\nLIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))\nLIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)\nHere gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91\ngamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).\ncoef0 = 0.0: kernel parameter (see above)\ndegree::Int32 = Int32(3): degree in polynomial kernel (see above)\ncost=1.0 (range (0, Inf)): the parameter denoted C in the cited reference; for greater regularization, decrease cost\nepsilon=0.1 (range (0, Inf)): the parameter denoted ϵ in the cited reference; epsilon is the thickness of the penalty-free neighborhood of the graph of the prediction function (\"slab\" or \"tube\"). Specifically, a data point (x, y) incurs no training loss unless it is outside this neighborhood; the further away it is from this neighborhood, the greater the loss penalty.\ncachesize=200.0: cache memory size in MB\ntolerance=0.001: tolerance for the stopping criterion\nshrinking=true: whether to use shrinking heuristics","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/#Operations","page":"EpsilonSVR","title":"Operations","text":"","category":"section"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/#Fitted-parameters","page":"EpsilonSVR","title":"Fitted parameters","text":"","category":"section"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"libsvm_model: the trained model object created by the LIBSVM.jl package","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/#Report","page":"EpsilonSVR","title":"Report","text":"","category":"section"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"The fields of report(mach) are:","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"gamma: actual value of the kernel parameter gamma used in training","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/#Examples","page":"EpsilonSVR","title":"Examples","text":"","category":"section"},{"location":"models/EpsilonSVR_LIBSVM/#Using-a-built-in-kernel","page":"EpsilonSVR","title":"Using a built-in kernel","text":"","category":"section"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"using MLJ\nimport LIBSVM\n\nEpsilonSVR = @load EpsilonSVR pkg=LIBSVM ## model type\nmodel = EpsilonSVR(kernel=LIBSVM.Kernel.Polynomial) ## instance\n\nX, y = make_regression(rng=123) ## table, vector\nmach = machine(model, X, y) |> fit!\n\nXnew, _ = make_regression(3, rng=123)\n\njulia> yhat = predict(mach, Xnew)\n3-element Vector{Float64}:\n 0.2512132502584155\n 
0.007340201523624579\n -0.2482949812264707","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/#User-defined-kernels","page":"EpsilonSVR","title":"User-defined kernels","text":"","category":"section"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`\nmodel = EpsilonSVR(kernel=k)\nmach = machine(model, X, y) |> fit!\n\njulia> yhat = predict(mach, Xnew)\n3-element Vector{Float64}:\n 1.1121225361666656\n 0.04667702229741916\n -0.6958148424680672","category":"page"},{"location":"models/EpsilonSVR_LIBSVM/","page":"EpsilonSVR","title":"EpsilonSVR","text":"See also NuSVR, LIBSVM.jl and the original C implementation documentation.","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#EvoSplineRegressor_EvoLinear","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"EvoSplineRegressor(; kwargs...)","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"A model type for constructing an EvoSplineRegressor, based on EvoLinear.jl, and implementing both an internal API and the MLJ model interface.","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#Keyword-arguments","page":"EvoSplineRegressor","title":"Keyword arguments","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"loss=:mse: loss function to be minimised. Can be one of:\n:mse\n:logistic\n:poisson\n:gamma\n:tweedie\nnrounds=10: maximum number of training rounds.\neta=1: Learning rate. Typically in the range [1e-2, 1].\nL1=0: Regularization penalty applied by shrinking to 0 weight update if update is < L1. No penalty if update > L1. Results in sparse feature selection. Typically in the [0, 1] range on normalized features.\nL2=0: Regularization penalty applied to the square of the weight update value. Restricts large parameter values. Typically in the [0, 1] range on normalized features.\nrng=123: random seed. Not used at the moment.\nupdater=:all: training method. Only :all is supported at the moment. Gradients for each feature are computed simultaneously, then the bias is updated based on all feature updates.\ndevice=:cpu: Only :cpu is supported at the moment.","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#Internal-API","page":"EvoSplineRegressor","title":"Internal API","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"Do config = EvoSplineRegressor() to construct a hyper-parameter struct with default hyper-parameters. 
Provide keyword arguments as listed above to override defaults, for example:","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"EvoSplineRegressor(loss=:logistic, L1=1e-3, L2=1e-2, nrounds=100)","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#Training-model","page":"EvoSplineRegressor","title":"Training model","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"A model is built using fit:","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"config = EvoSplineRegressor()\nm = fit(config; x, y, w)","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#Inference","page":"EvoSplineRegressor","title":"Inference","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"The fitted result is an EvoLinearModel, which acts as a prediction function when passed a features matrix as argument. ","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"preds = m(x)","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#MLJ-Interface","page":"EvoSplineRegressor","title":"MLJ Interface","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"From MLJ, the type can be imported using:","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"EvoSplineRegressor = @load EvoSplineRegressor pkg=EvoLinear","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"Do model = EvoSplineRegressor() to construct an instance with default hyper-parameters (a minimal end-to-end sketch is given below). 
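Here is the promised end-to-end sketch of the MLJ workflow for this model, using MLJ's make_regression to generate synthetic data; the hyper-parameter values are illustrative only:

```julia
using MLJ

EvoSplineRegressor = @load EvoSplineRegressor pkg=EvoLinear verbosity=0
model = EvoSplineRegressor(loss=:mse, nrounds=50)

X, y = make_regression(200, 5)       ## synthetic table, vector
mach = machine(model, X, y) |> fit!

yhat = predict(mach, X)              ## deterministic predictions
fitted_params(mach)                  ## see "Fitted parameters" below
```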
Provide keyword arguments to override hyper-parameter defaults, as in EvoSplineRegressor(loss=...).","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#Training-model-2","page":"EvoSplineRegressor","title":"Training model","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where: ","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#Operations","page":"EvoSplineRegressor","title":"Operations","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"predict(mach, Xnew): return predictions of the target given","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"features Xnew having the same scitype as X above. Predictions are deterministic.","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#Fitted-parameters","page":"EvoSplineRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":":fitresult: the SplineModel object returned by EvoSplineRegressor fitting algorithm.","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/#Report","page":"EvoSplineRegressor","title":"Report","text":"","category":"section"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":"The fields of report(mach) are:","category":"page"},{"location":"models/EvoSplineRegressor_EvoLinear/","page":"EvoSplineRegressor","title":"EvoSplineRegressor","text":":coef: Vector of coefficients (βs) associated to each of the features.\n:bias: Value of the bias.\n:names: Names of each of the features.","category":"page"},{"location":"models/RandomForestRegressor_BetaML/#RandomForestRegressor_BetaML","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"","category":"section"},{"location":"models/RandomForestRegressor_BetaML/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"mutable struct RandomForestRegressor <: MLJModelInterface.Deterministic","category":"page"},{"location":"models/RandomForestRegressor_BetaML/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"A simple Random Forest model for regression with support for Missing data, from the Beta Machine Learning Toolkit 
(BetaML).","category":"page"},{"location":"models/RandomForestRegressor_BetaML/#Hyperparameters:","page":"RandomForestRegressor","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/RandomForestRegressor_BetaML/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"n_trees::Int64: Number of (decision) trees in the forest [def: 30]\nmax_depth::Int64: The maximum depth the tree is allowed to reach. When this is reached the node is forced to become a leaf [def: 0, i.e. no limits]\nmin_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]\nmin_records::Int64: The minimum number of records a node must hold to be considered for partitioning [def: 2]\nmax_features::Int64: The maximum number of (random) features to consider at each partitioning [def: 0, i.e. square root of the data dimension]\nsplitting_criterion::Function: This is the name of the function to be used to compute the information gain of a specific partition. This is done by measuring the difference between the \"impurity\" of the labels of the parent node and those of the two child nodes, weighted by the respective number of items. [def: variance]. Either variance or a custom function. It can also be an anonymous function.\nβ::Float64: Parameter that regulates the weights of the scoring of each tree, to be (optionally) used in prediction based on the error of the individual trees computed on the records on which trees have not been trained. Higher values favour \"better\" trees, but too high values will cause overfitting [def: 0, i.e. uniform weights]\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]","category":"page"},{"location":"models/RandomForestRegressor_BetaML/#Example:","page":"RandomForestRegressor","title":"Example:","text":"","category":"section"},{"location":"models/RandomForestRegressor_BetaML/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"julia> using MLJ\n\njulia> X, y = @load_boston;\n\njulia> modelType = @load RandomForestRegressor pkg = \"BetaML\" verbosity=0\nBetaML.Trees.RandomForestRegressor\n\njulia> model = modelType()\nRandomForestRegressor(\n n_trees = 30, \n max_depth = 0, \n min_gain = 0.0, \n min_records = 2, \n max_features = 0, \n splitting_criterion = BetaML.Utils.variance, \n β = 0.0, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n[ Info: Training machine(RandomForestRegressor(n_trees = 30, …), …).\n\njulia> ŷ = predict(mach, X);\n\njulia> hcat(y,ŷ)\n506×2 Matrix{Float64}:\n 24.0 25.8433\n 21.6 22.4317\n 34.7 35.5742\n 33.4 33.9233\n ⋮ \n 23.9 24.42\n 22.0 22.4433\n 11.9 15.5833","category":"page"},{"location":"models/KMeans_ParallelKMeans/#KMeans_ParallelKMeans","page":"KMeans","title":"KMeans","text":"","category":"section"},{"location":"models/KMeans_ParallelKMeans/","page":"KMeans","title":"KMeans","text":"Parallel & lightning fast implementation of all available variants of the KMeans clustering algorithm in native Julia (a minimal MLJ loading sketch is given below). 
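Here is the promised loading sketch. It sticks to default hyper-parameters, since this page does not list them, and it stops short of calling any operation; consult the model's document string (for example, `@doc KMeans` after loading) for the supported operations and options:

```julia
using MLJ
import ParallelKMeans

KMeans = @load KMeans pkg=ParallelKMeans verbosity=0
model = KMeans()                  ## default hyper-parameters

X, _ = make_blobs(500, 3)         ## synthetic table of Continuous features
mach = machine(model, X) |> fit!  ## unsupervised: no target is bound

## inspect fitted_params(mach) and report(mach) for the training results
```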
Compatible with Julia 1.3+","category":"page"},{"location":"models/BisectingKMeans_MLJScikitLearnInterface/#BisectingKMeans_MLJScikitLearnInterface","page":"BisectingKMeans","title":"BisectingKMeans","text":"","category":"section"},{"location":"models/BisectingKMeans_MLJScikitLearnInterface/","page":"BisectingKMeans","title":"BisectingKMeans","text":"BisectingKMeans","category":"page"},{"location":"models/BisectingKMeans_MLJScikitLearnInterface/","page":"BisectingKMeans","title":"BisectingKMeans","text":"A model type for constructing a bisecting k means, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BisectingKMeans_MLJScikitLearnInterface/","page":"BisectingKMeans","title":"BisectingKMeans","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BisectingKMeans_MLJScikitLearnInterface/","page":"BisectingKMeans","title":"BisectingKMeans","text":"BisectingKMeans = @load BisectingKMeans pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/BisectingKMeans_MLJScikitLearnInterface/","page":"BisectingKMeans","title":"BisectingKMeans","text":"Do model = BisectingKMeans() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BisectingKMeans(n_clusters=...).","category":"page"},{"location":"models/BisectingKMeans_MLJScikitLearnInterface/","page":"BisectingKMeans","title":"BisectingKMeans","text":"Bisecting K-Means clustering.","category":"page"},{"location":"logging_workflows/#Logging-Workflows","page":"Logging Workflows","title":"Logging Workflows","text":"","category":"section"},{"location":"logging_workflows/#MLflow-integration","page":"Logging Workflows","title":"MLflow integration","text":"","category":"section"},{"location":"logging_workflows/","page":"Logging Workflows","title":"Logging Workflows","text":"MLflow is a popular, language-agnostic, tool for externally logging the outcomes of machine learning experiments, including those carried out using MLJ.","category":"page"},{"location":"logging_workflows/","page":"Logging Workflows","title":"Logging Workflows","text":"MLJ logging examples are given in the MLJFlow.jl documentation. MLJ includes and re-exports all the methods of MLJFlow.jl, so there is no need to import MLJFlow.jl if using MLJ.","category":"page"},{"location":"logging_workflows/","page":"Logging Workflows","title":"Logging Workflows","text":"warning: Warning\nMLJFlow.jl is a new package still under active development and should be regarded as experimental. 
At this time, breaking changes to MLJFlow.jl will not necessarily trigger new breaking releases of MLJ.jl.","category":"page"},{"location":"models/ComplementNBClassifier_MLJScikitLearnInterface/#ComplementNBClassifier_MLJScikitLearnInterface","page":"ComplementNBClassifier","title":"ComplementNBClassifier","text":"","category":"section"},{"location":"models/ComplementNBClassifier_MLJScikitLearnInterface/","page":"ComplementNBClassifier","title":"ComplementNBClassifier","text":"ComplementNBClassifier","category":"page"},{"location":"models/ComplementNBClassifier_MLJScikitLearnInterface/","page":"ComplementNBClassifier","title":"ComplementNBClassifier","text":"A model type for constructing a Complement naive Bayes classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ComplementNBClassifier_MLJScikitLearnInterface/","page":"ComplementNBClassifier","title":"ComplementNBClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ComplementNBClassifier_MLJScikitLearnInterface/","page":"ComplementNBClassifier","title":"ComplementNBClassifier","text":"ComplementNBClassifier = @load ComplementNBClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/ComplementNBClassifier_MLJScikitLearnInterface/","page":"ComplementNBClassifier","title":"ComplementNBClassifier","text":"Do model = ComplementNBClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ComplementNBClassifier(alpha=...).","category":"page"},{"location":"models/ComplementNBClassifier_MLJScikitLearnInterface/","page":"ComplementNBClassifier","title":"ComplementNBClassifier","text":"Similar to MultinomialNBClassifier but with more robust assumptions. 
Suited for imbalanced datasets.","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/#RobustRegressor_MLJLinearModels","page":"RobustRegressor","title":"RobustRegressor","text":"","category":"section"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"RobustRegressor","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"A model type for constructing a robust regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"RobustRegressor = @load RobustRegressor pkg=MLJLinearModels","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"Do model = RobustRegressor() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"Robust regression is a linear model with objective function","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"$","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"∑ρ(Xθ - y) + n⋅λ|θ|₂² + n⋅γ|θ|₁ $","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"where ρ is a robust loss function (e.g. the Huber function) and n is the number of observations.","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"If scale_penalty_with_samples = false the objective function is instead","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"$","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"∑ρ(Xθ - y) + λ|θ|₂² + γ|θ|₁ $","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":".","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"Different solver options exist, as indicated under \"Hyperparameters\" below. 
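For illustration, one might select the Huber loss with an elastic-net penalty as in the following minimal sketch (the particular values shown are arbitrary and chosen only to exercise the hyper-parameters documented below):\n\nusing MLJ\nimport MLJLinearModels\nRobustRegressor = @load RobustRegressor pkg=MLJLinearModels\n\n# Huber loss with an elastic-net penalty; values chosen for illustration only\nmodel = RobustRegressor(rho=MLJLinearModels.HuberRho(0.5),\n penalty=:en,\n lambda=0.5,\n gamma=0.1,\n scale_penalty_with_samples=false)\n\n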
","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/#Training-data","page":"RobustRegressor","title":"Training data","text":"","category":"section"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"where:","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/#Hyperparameters","page":"RobustRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"rho::MLJLinearModels.RobustRho: the type of robust loss, which can be any instance of MLJLinearModels.L where L is one of: AndrewsRho, BisquareRho, FairRho, HuberRho, LogisticRho, QuantileRho, TalwarRho, HuberRho, TalwarRho. Default: HuberRho(0.1)\nlambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. Default: 1.0\ngamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0\npenalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2\nfit_intercept::Bool: whether to fit the intercept or not. Default: true\npenalize_intercept::Bool: whether to penalize the intercept. Default: false\nscale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, IWLSCG, Newton, NewtonCG, if penalty = :l2, and ProxGrad otherwise.\nIf solver = nothing (default) then LBFGS() is used, if penalty = :l2, and otherwise ProxGrad(accel=true) (FISTA) is used.\nSolver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) 
Default: nothing","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/#Example","page":"RobustRegressor","title":"Example","text":"","category":"section"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"using MLJ\nX, y = make_regression()\nmach = fit!(machine(RobustRegressor(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"},{"location":"models/RobustRegressor_MLJLinearModels/","page":"RobustRegressor","title":"RobustRegressor","text":"See also HuberRegressor, QuantileRegressor.","category":"page"},{"location":"controlling_iterative_models/#Controlling-Iterative-Models","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Iterative supervised machine learning models are usually trained until an out-of-sample estimate of the performance satisfies some stopping criterion, such as k consecutive deteriorations of the performance (see Patience below). A more sophisticated kind of control might dynamically mutate parameters, such as a learning rate, in response to the behavior of these estimates.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Some iterative model implementations enable some form of automated control, with the method and options for doing so varying from model to model. But sometimes it is up to the user to arrange control, which in the crudest case reduces to manually experimenting with the iteration parameter.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"In response to this ad hoc state of affairs, MLJ provides a uniform and feature-rich interface for controlling any iterative model that exposes its iteration parameter as a hyper-parameter, and which implements the \"warm restart\" behavior described in Machines.","category":"page"},{"location":"controlling_iterative_models/#Basic-use","page":"Controlling Iterative Models","title":"Basic use","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"As in Tuning Models, iteration control in MLJ is implemented as a model wrapper, which allows composition with other meta-algorithms. 
Ordinarily, the wrapped model behaves just like the original model, but with the training occurring on a subset of the provided data (to allow computation of an out-of-sample loss) and with the iteration parameter automatically determined by the controls specified in the wrapper.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"By setting retrain=true one can ask that the wrapped model retrain on all supplied data, after learning the appropriate number of iterations from the controlled training phase:","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"using MLJ\n\nX, y = make_moons(100, rng=123, noise=0.5)\nEvoTreeClassifier = @load EvoTreeClassifier verbosity=0\n\niterated_model = IteratedModel(model=EvoTreeClassifier(rng=123, eta=0.005),\n resampling=Holdout(),\n measures=log_loss,\n controls=[Step(5),\n Patience(2),\n NumberLimit(100)],\n retrain=true)\n\nmach = machine(iterated_model, X, y)\nnothing # hide","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"fit!(mach)","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"As detailed under IteratedModel below, the specified controls are repeatedly applied in sequence to a training machine, constructed under the hood, until one of the controls triggers a stop. Here Step(5) means \"Compute 5 more iterations\" (in this case starting from none); Patience(2) means \"Stop at the end of the control cycle if there have been 2 consecutive drops in the log loss\"; and NumberLimit(100) is a safeguard ensuring a stop after 100 control cycles (500 iterations). See Controls provided below for a complete list.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Because iteration is implemented as a wrapper, the \"self-iterating\" model can be evaluated using cross-validation, say, and the number of iterations on each fold will generally be different:","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"e = evaluate!(mach, resampling=CV(nfolds=3), measure=log_loss, verbosity=0);\nmap(e.report_per_fold) do r\n r.n_iterations\nend","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Alternatively, one might wrap the self-iterating model in a tuning strategy, using TunedModel; see Tuning Models. 
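For example, continuing the example above, one might tune the maximum tree depth of the atomic EvoTrees model, with the appropriate number of iterations re-determined automatically for each evaluation (a sketch only; the range, tuning strategy and resampling choices are arbitrary):\n\nr = range(iterated_model, :(model.max_depth), lower=2, upper=6)\nself_iterating_tuned_model = TunedModel(model=iterated_model,\n tuning=Grid(),\n resampling=CV(nfolds=3),\n range=r,\n measure=log_loss)\ntuned_mach = machine(self_iterating_tuned_model, X, y) |> fit!\n\n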
In this way, the optimization of some other hyper-parameter is realized simultaneously with that of the iteration parameter, which will frequently be more efficient than a direct two-parameter search.","category":"page"},{"location":"controlling_iterative_models/#Controls-provided","page":"Controlling Iterative Models","title":"Controls provided","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"In the table below, mach is the training machine being iterated, constructed by binding the supplied data to the model specified in the IteratedModel wrapper, but trained in each iteration on a subset of the data, according to the value of the resampling hyper-parameter of the wrapper (using all data if resampling=nothing).","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"control description can trigger a stop\nStep(n=1) Train model for n more iterations no\nTimeLimit(t=0.5) Stop after t hours yes\nNumberLimit(n=100) Stop after n applications of the control yes\nNumberSinceBest(n=6) Stop when best loss occurred n control applications ago yes\nInvalidValue() Stop when NaN, Inf or -Inf loss/training loss encountered yes\nThreshold(value=0.0) Stop when loss < value yes\nGL(alpha=2.0) † Stop after the \"generalization loss (GL)\" exceeds alpha yes\nPQ(alpha=0.75, k=5) † Stop after \"progress-modified GL\" exceeds alpha yes\nPatience(n=5) † Stop after n consecutive loss increases yes\nWarmup(c; n=1) Wait for n loss updates before checking criteria c no\nInfo(f=identity) Log to Info the value of f(mach), where mach is the current machine no\nWarn(predicate; f=\"\") Log to Warn the value of f or f(mach), if predicate(mach) holds no\nError(predicate; f=\"\") Log to Error the value of f or f(mach), if predicate(mach) holds, and then stop yes\nCallback(f=mach->nothing) Call f(mach) yes\nWithNumberDo(f=n->@info(n)) Call f(n + 1) where n is the number of complete control cycles so far yes\nWithIterationsDo(f=i->@info(\"iterations: $i\")) Call f(i), where i is the total number of iterations yes\nWithLossDo(f=x->@info(\"loss: $x\")) Call f(loss) where loss is the current loss yes\nWithTrainingLossesDo(f=v->@info(v)) Call f(v) where v is the current batch of training losses yes\nWithEvaluationDo(f=e->@info(\"evaluation: $e\")) Call f(e) where e is the current performance evaluation object yes\nWithFittedParamsDo(f=fp->@info(\"fitted_params: $fp\")) Call f(fp) where fp is the fitted parameters of the training machine yes\nWithReportDo(f=r->@info(\"report: $r\")) Call f(r) where r is the training machine report yes\nWithModelDo(f=m->@info(\"model: $m\")) Call f(m) where m is the model, which may be mutated by f yes\nWithMachineDo(f=mach->@info(\"machine: $mach\")) Call f(mach) where mach is the training machine in its current state yes\nSave(filename=\"machine.jls\") Save current training machine to machine1.jls, machine2.jls, etc. yes","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Table 1. Atomic controls. Some advanced options are omitted.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"† For more on these controls see Prechelt, Lutz (1998): \"Early Stopping - But When?\", in Neural Networks: Tricks of the Trade, ed. G. 
Orr, Springer.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Stopping option. All the following controls trigger a stop if the provided function f returns true and stop_if_true=true is specified in the constructor: Callback, WithNumberDo, WithLossDo, WithTrainingLossesDo.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"There are also three control wrappers to modify a control's behavior:","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"wrapper description\nIterationControl.skip(control, predicate=1) Apply control every predicate applications of the control wrapper (can also be a function; see doc-string)\nIterationControl.louder(control, by=1) Increase the verbosity level of control by the specified value (negative values lower verbosity)\nIterationControl.with_state_do(control; f=...) Apply control and call f(x) where x is the internal state of control; useful for debugging. Default f logs state to Info. Warning: internal control state is not yet part of the public API.\nIterationControl.composite(controls...) Apply each control in controls in sequence; used internally by IterationControl.jl","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Table 2. Wrapped controls","category":"page"},{"location":"controlling_iterative_models/#Using-training-losses,-and-controlling-model-tuning","page":"Controlling Iterative Models","title":"Using training losses, and controlling model tuning","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Some iterative models report a training loss, as a byproduct of a fit! call and these can be used in two ways:","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"To supplement an out-of-sample estimate of the loss in deciding when to stop, as in the PQ stopping criterion (see Prechelt, Lutz (1998))); or\nAs a (generally less reliable) substitute for an out-of-sample loss, when wishing to train exclusively on all supplied data.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"To have IteratedModel bind all data to the training machine and use training losses in place of an out-of-sample loss, specify resampling=nothing. To check if MyFavoriteIterativeModel reports training losses, load the model code and inspect supports_training_losses(MyFavoriteIterativeModel) (or do info(\"MyFavoriteIterativeModel\"))","category":"page"},{"location":"controlling_iterative_models/#Controlling-model-tuning","page":"Controlling Iterative Models","title":"Controlling model tuning","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"An example of scenario 2 occurs when controlling hyperparameter optimization (model tuning). Recall that MLJ's TunedModel wrapper is implemented as an iterative model. 
Moreover, this wrapper reports, as a training loss, the lowest value of the optimization objective function so far (typically the lowest value of an out-of-sample loss, or -1 times an out-of-sample score). One may want to simply end the hyperparameter search when this value meets the NumberSinceBest stopping criterion discussed below, say, rather than introducing an extra layer of resampling to first \"learn\" the optimal value of the iteration parameter.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"In the following example, we conduct a RandomSearch for the optimal value of the regularization parameter lambda in a RidgeRegressor using 6-fold cross-validation. By wrapping our \"self-tuning\" version of the regressor as an IteratedModel, with resampling=nothing and NumberSinceBest(20) in the controls, we terminate the search when the number of lambda values tested since the previous best cross-validation loss reaches 20.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"using MLJ\n\nX, y = @load_boston;\nRidgeRegressor = @load RidgeRegressor pkg=MLJLinearModels verbosity=0\nmodel = RidgeRegressor()\nr = range(model, :lambda, lower=-1, upper=2, scale=x->10^x)\nself_tuning_model = TunedModel(model=model,\n tuning=RandomSearch(rng=123),\n resampling=CV(nfolds=6),\n range=r,\n measure=mae);\niterated_model = IteratedModel(model=self_tuning_model,\n resampling=nothing,\n controls=[Step(1), NumberSinceBest(20), NumberLimit(1000)])\nmach = machine(iterated_model, X, y)\nnothing # hide","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"fit!(mach)","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"report(mach).model_report.best_model","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"We can use mach here to directly obtain predictions using the optimal model (trained on all data), as in","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"predict(mach, selectrows(X, 1:4))","category":"page"},{"location":"controlling_iterative_models/#Custom-controls","page":"Controlling Iterative Models","title":"Custom controls","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Under the hood, control in MLJIteration is implemented using IterationControl.jl. Rather than iterating a training machine directly, we iterate a wrapped version of this object, which includes other information that a control may want to access, such as the MLJ evaluation object. This information is summarized under The training machine wrapper below.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Controls must implement two update! 
methods, one for initializing the control's state on the first application of the control (this state being external to the control struct) and one for all subsequent control applications, which generally updates the state as well. There are two optional methods: done, for specifying conditions triggering a stop, and takedown for specifying actions to perform at the end of controlled training.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"We summarize the training algorithm, as it relates to controls, after giving a simple example.","category":"page"},{"location":"controlling_iterative_models/#Example-1-Non-uniform-iteration-steps","page":"Controlling Iterative Models","title":"Example 1 - Non-uniform iteration steps","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Below we define a control, IterateFromList(list), to train, on each application of the control, until the iteration count reaches the next value in a user-specified list, triggering a stop when the list is exhausted. For example, to train on iteration counts on a log scale, one might use IterateFromList([round(Int, 10^x) for x in range(1, 2, length=10)]).","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"In the code, wrapper is an object that wraps the training machine (see above). The variable n is a counter for control cycles (unused in this example).","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"\nimport IterationControl # or MLJ.IterationControl\n\nstruct IterateFromList\n list::Vector{<:Int} # list of iteration parameter values\n IterateFromList(v) = new(unique(sort(v)))\nend\n\nfunction IterationControl.update!(control::IterateFromList, wrapper, verbosity, n)\n Δi = control.list[1]\n verbosity > 1 && @info \"Training $Δi more iterations. \"\n MLJIteration.train!(wrapper, Δi) # trains the training machine\n return (index = 2, )\nend\n\nfunction IterationControl.update!(control::IterateFromList, wrapper, verbosity, n, state)\n index = state.index # position in the list\n Δi = control.list[index] - wrapper.n_iterations\n verbosity > 1 && @info \"Training $Δi more iterations. 
\"\n MLJIteration.train!(wrapper, Δi)\n return (index = index + 1, )\nend","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"The first update method will be called the first time the control is applied, returning an initialized state = (index = 2,), which is passed to the second update method, which is called on subsequent control applications (and which returns the updated state).","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"A done method articulates the criterion for stopping:","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"IterationControl.done(control::IterateFromList, state) =\n state.index > length(control.list)","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"For the sake of illustration, we'll implement a takedown method; its return value is included in the IteratedModel report:","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"IterationControl.takedown(control::IterateFromList, verbosity, state)\n verbosity > 1 && = @info \"Stepped through these values of the \"*\n \"iteration parameter: $(control.list)\"\n return (iteration_values=control.list, )\nend","category":"page"},{"location":"controlling_iterative_models/#The-training-machine-wrapper","page":"Controlling Iterative Models","title":"The training machine wrapper","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"A training machine wrapper has these properties:","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"wrapper.machine - the training machine, type Machine\nwrapper.model - the mutable atomic model, coinciding with wrapper.machine.model\nwrapper.n_cycles - the number IterationControl.train!(wrapper, _) calls so far; generally the current control cycle count\nwrapper.n_iterations - the total number of iterations applied to the model so far\nwrapper.Δiterations - the number of iterations applied in the last IterationControl.train!(wrapper, _) call\nwrapper.loss - the out-of-sample loss (based on the first measure in measures)\nwrapper.training_losses - the last batch of training losses (if reported by model), an abstract vector of length wrapper.Δiteration.\nwrapper.evaluation - the complete MLJ performance evaluation object, which has the following properties: measure, measurement, per_fold, per_observation, fitted_params_per_fold, report_per_fold (here there is only one fold). For further details, see Evaluating Model Performance.","category":"page"},{"location":"controlling_iterative_models/#The-training-algorithm","page":"Controlling Iterative Models","title":"The training algorithm","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"Here now is a simplified description of the training of an IteratedModel. 
First, the atomic model is bound in a machine - the training machine above - to a subset of the supplied data, and then wrapped in an object called wrapper below. To train the training machine machine for i more iterations, and update the other data in the wrapper, requires the call MLJIteration.train!(wrapper, i). Only controls can make this call (e.g., Step(...), or IterateFromList(...) above). If we assume for simplicity there is only a single control, called control, then training proceeds as follows:","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"n = 1 # initialize control cycle counter\nstate = update!(control, wrapper, verbosity, n)\nfinished = done(control, state)\n\n# subsequent training events:\nwhile !finished\n n += 1\n state = update!(control, wrapper, verbosity, n, state)\n finished = done(control, state)\nend\n\n# finalization:\nreturn takedown(control, verbosity, state)","category":"page"},{"location":"controlling_iterative_models/#Example-2-Cyclic-learning-rates","page":"Controlling Iterative Models","title":"Example 2 - Cyclic learning rates","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"The control below implements a triangular cyclic learning rate policy, close to that described in L. N. Smith (2019): \"Cyclical Learning Rates for Training Neural Networks,\" 2017 IEEE Winter Conference on Applications of Computer Vision (WACV), Santa Rosa, CA, USA, pp. 464-472. [In that paper learning rates are mutated (slowly) during each training iteration (epoch) while here mutations can occur once per iteration of the model, at most.]","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"For the sake of illustration, we suppose the iterative model, model, specified in the IteratedModel constructor, has a field called :learning_parameter, and that mutating this parameter does not trigger cold-restarts.","category":"page"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"struct CycleLearningRate{F<:AbstractFloat}\n stepsize::Int\n lower::F\n upper::F\nend\n\n# return one cycle of learning rate values:\nfunction one_cycle(c::CycleLearningRate)\n rise = range(c.lower, c.upper, length=c.stepsize + 1)\n fall = reverse(rise)\n return vcat(rise[1:end - 1], fall[1:end - 1])\nend\n\nfunction IterationControl.update!(control::CycleLearningRate,\n wrapper,\n verbosity,\n n,\n state = (learning_rates=nothing, ))\n rates = n == 0 ? 
one_cycle(control) : state.learning_rates\n index = mod(n, length(rates)) + 1\n r = rates[index]\n verbosity > 1 && @info \"learning rate: $r\"\n wrapper.model.iteration_control = r\n return (learning_rates = rates,)\nend","category":"page"},{"location":"controlling_iterative_models/#API-Reference","page":"Controlling Iterative Models","title":"API Reference","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"MLJIteration.IteratedModel","category":"page"},{"location":"controlling_iterative_models/#MLJIteration.IteratedModel","page":"Controlling Iterative Models","title":"MLJIteration.IteratedModel","text":"IteratedModel(model=nothing,\n controls=Any[Step(1), Patience(5), GL(2.0), TimeLimit(Dates.Millisecond(108000)), InvalidValue()],\n retrain=false,\n resampling=Holdout(),\n measure=nothing,\n weights=nothing,\n class_weights=nothing,\n operation=predict,\n verbosity=1,\n check_measure=true,\n iteration_parameter=nothing,\n cache=true)\n\nWrap the specified model <: Supervised in the specified iteration controls. Training a machine bound to the wrapper iterates a corresonding machine bound to model. Here model should support iteration.\n\nTo list all controls, do MLJIteration.CONTROLS. Controls are summarized at https://alan-turing-institute.github.io/MLJ.jl/dev/getting_started/ but query individual doc-strings for details and advanced options. For creating your own controls, refer to the documentation just cited.\n\nTo make out-of-sample losses available to the controls, the machine bound to model is only trained on part of the data, as iteration proceeds. See details on training below. Specify retrain=true to ensure the model is retrained on all available data, using the same number of iterations, once controlled iteration has stopped.\n\nSpecify resampling=nothing if all data is to be used for controlled iteration, with each out-of-sample loss replaced by the most recent training loss, assuming this is made available by the model (supports_training_losses(model) == true). Otherwise, resampling must have type Holdout (eg, Holdout(fraction_train=0.8, rng=123)).\n\nAssuming retrain=true or resampling=nothing, iterated_model behaves exactly like the original model but with the iteration parameter automatically selected. If retrain=false (default) and resampling is not nothing, then iterated_model behaves like the original model trained on a subset of the provided data.\n\nControlled iteration can be continued with new fit! calls (warm restart) by mutating a control, or by mutating the iteration parameter of model, which is otherwise ignored.\n\nTraining\n\nGiven an instance iterated_model of IteratedModel, calling fit!(mach) on a machine mach = machine(iterated_model, data...) performs the following actions:\n\nAssuming resampling !== nothing, the data is split into train and test sets, according to the specified resampling strategy, which must have type Holdout.\nA clone of the wrapped model, iterated_model.model, is bound to the train data in an internal machine, train_mach. If resampling === nothing, all data is used instead. This machine is the object to which controls are applied. For example, Callback(fitted_params |> print) will print the value of fitted_params(train_mach).\nThe iteration parameter of the clone is set to 0.\nThe specified controls are repeatedly applied to train_mach in sequence, until one of the controls triggers a stop. 
Loss-based controls (eg, Patience(), GL(), Threshold(0.001)) use an out-of-sample loss, obtained by applying measure to predictions and the test target values. (Specifically, these predictions are those returned by operation(train_mach).) If resampling === nothing then the most recent training loss is used instead. Some controls require both out-of-sample and training losses (eg, PQ()).\nOnce a stop has been triggered, a clone of model is bound to all data in a machine called mach_production below, unless retrain == false or resampling === nothing, in which case mach_production coincides with train_mach.\n\nPrediction\n\nCalling predict(mach, Xnew) returns predict(mach_production, Xnew). Similar similar statements hold for predict_mean, predict_mode, predict_median.\n\nControls\n\nA control is permitted to mutate the fields (hyper-parameters) of train_mach.model (the clone of model). For example, to mutate a learning rate one might use the control\n\nCallback(mach -> mach.model.eta = 1.05*mach.model.eta)\n\nHowever, unless model supports warm restarts with respect to changes in that parameter, this will trigger retraining of train_mach from scratch, with a different training outcome, which is not recommended.\n\nWarm restarts\n\nIf iterated_model is mutated and fit!(mach) is called again, then a warm restart is attempted if the only parameters to change are model or controls or both.\n\nSpecifically, train_mach.model is mutated to match the current value of iterated_model.model and the iteration parameter of the latter is updated to the last value used in the preceding fit!(mach) call. Then repeated application of the (updated) controls begin anew.\n\n\n\n\n\n","category":"function"},{"location":"controlling_iterative_models/#Controls","page":"Controlling Iterative Models","title":"Controls","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"IterationControl.Step\nEarlyStopping.TimeLimit\nEarlyStopping.NumberLimit\nEarlyStopping.NumberSinceBest\nEarlyStopping.InvalidValue\nEarlyStopping.Threshold\nEarlyStopping.GL\nEarlyStopping.PQ\nEarlyStopping.Patience\nIterationControl.Info\nIterationControl.Warn\nIterationControl.Error\nIterationControl.Callback\nIterationControl.WithNumberDo\nMLJIteration.WithIterationsDo\nIterationControl.WithLossDo\nIterationControl.WithTrainingLossesDo\nMLJIteration.WithEvaluationDo\nMLJIteration.WithFittedParamsDo\nMLJIteration.WithReportDo\nMLJIteration.WithModelDo\nMLJIteration.WithMachineDo\nMLJIteration.Save","category":"page"},{"location":"controlling_iterative_models/#IterationControl.Step","page":"Controlling Iterative Models","title":"IterationControl.Step","text":"Step(; n=1)\n\nAn iteration control, as in, Step(2). \n\nTrain for n more iterations. Will never trigger a stop. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#EarlyStopping.TimeLimit","page":"Controlling Iterative Models","title":"EarlyStopping.TimeLimit","text":"TimeLimit(; t=0.5)\n\nAn early stopping criterion for loss-reporting iterative algorithms. \n\nStopping is triggered after t hours have elapsed since the stopping criterion was initiated.\n\nAny Julia built-in Real type can be used for t. Subtypes of Period may also be used, as in TimeLimit(t=Minute(30)).\n\nInternally, t is rounded to nearest millisecond. 
``\n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#EarlyStopping.NumberLimit","page":"Controlling Iterative Models","title":"EarlyStopping.NumberLimit","text":"NumberLimit(; n=100)\n\nAn early stopping criterion for loss-reporting iterative algorithms. \n\nA stop is triggered by n consecutive loss updates, excluding \"training\" loss updates.\n\nIf wrapped in a stopper::EarlyStopper, this is the number of calls to done!(stopper).\n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#EarlyStopping.NumberSinceBest","page":"Controlling Iterative Models","title":"EarlyStopping.NumberSinceBest","text":"NumberSinceBest(; n=6)\n\nAn early stopping criterion for loss-reporting iterative algorithms. \n\nA stop is triggered when the number of calls to the control, since the lowest value of the loss so far, is n.\n\nFor a customizable loss-based stopping criterion, use WithLossDo or WithTrainingLossesDo with the stop_if_true=true option. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#EarlyStopping.InvalidValue","page":"Controlling Iterative Models","title":"EarlyStopping.InvalidValue","text":"InvalidValue()\n\nAn early stopping criterion for loss-reporting iterative algorithms. \n\nStop if a loss (or training loss) is NaN, Inf or -Inf (or, more precisely, if isnan(loss) or isinf(loss) is true).\n\nFor a customizable loss-based stopping criterion, use WithLossDo or WithTrainingLossesDo with the stop_if_true=true option. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#EarlyStopping.Threshold","page":"Controlling Iterative Models","title":"EarlyStopping.Threshold","text":"Threshold(; value=0.0)\n\nAn early stopping criterion for loss-reporting iterative algorithms. \n\nA stop is triggered as soon as the loss drops below value.\n\nFor a customizable loss-based stopping criterion, use WithLossDo or WithTrainingLossesDo with the stop_if_true=true option. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#EarlyStopping.GL","page":"Controlling Iterative Models","title":"EarlyStopping.GL","text":"GL(; alpha=2.0)\n\nAn early stopping criterion for loss-reporting iterative algorithms. \n\nA stop is triggered when the (rescaled) generalization loss exceeds the threshold alpha.\n\nTerminology. Suppose E_1 E_2 E_t are a sequence of losses, for example, out-of-sample estimates of the loss associated with some iterative machine learning algorithm. Then the generalization loss at time t, is given by\n\nGL_t = 100 (E_t - E_opt) over E_opt\n\nwhere E_opt is the minimum value of the sequence.\n\nReference: Prechelt, Lutz (1998): \"Early Stopping- But When?\", in Neural Networks: Tricks of the Trade, ed. G. Orr, Springer..\n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#EarlyStopping.PQ","page":"Controlling Iterative Models","title":"EarlyStopping.PQ","text":"PQ(; alpha=0.75, k=5, tol=eps(Float64))\n\nA stopping criterion for training iterative supervised learners.\n\nA stop is triggered when Prechelt's progress-modified generalization loss exceeds the threshold PQ_T alpha, or if the training progress drops below P_j tol. 
Here k is the number of training (in-sample) losses used to estimate the training progress.\n\nContext and explanation of terminology\n\nThe training progress at time j is defined by\n\nP_j = 1000 M - mm\n\nwhere M is the mean of the last k training losses F_1 F_2 F_k and m is the minimum value of those losses.\n\nThe progress-modified generalization loss at time t is then given by\n\nPQ_t = GL_t P_t\n\nwhere GL_t is the generalization loss at time t; see GL.\n\nPQ will stop when the following are true:\n\nAt least k training samples have been collected via done!(c::PQ, loss; training = true) or update_training(c::PQ, loss, state)\nThe last update was an out-of-sample update. (done!(::PQ, loss; training=true) is always false)\nThe progress-modified generalization loss exceeds the threshold PQ_t alpha OR the training progress stalls P_j tol.\n\nReference: Prechelt, Lutz (1998): \"Early Stopping- But When?\", in Neural Networks: Tricks of the Trade, ed. G. Orr, Springer..\n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#EarlyStopping.Patience","page":"Controlling Iterative Models","title":"EarlyStopping.Patience","text":"Patience(; n=5)\n\nAn early stopping criterion for loss-reporting iterative algorithms. \n\nA stop is triggered by n consecutive increases in the loss.\n\nDenoted \"UPs\" in Prechelt, Lutz (1998): \"Early Stopping- But When?\", in Neural Networks: Tricks of the Trade, ed. G. Orr, Springer..\n\nFor a customizable loss-based stopping criterion, use WithLossDo or WithTrainingLossesDo with the stop_if_true=true option. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#IterationControl.Info","page":"Controlling Iterative Models","title":"IterationControl.Info","text":"Info(f=identity)\n\nAn iteration control, as in, Info(my_loss_function). \n\nLog to Info the value of f(m), where m is the object being iterated. If IterativeControl.expose(m) has been overloaded, then log f(expose(m)) instead.\n\nCan be suppressed by setting the global verbosity level sufficiently low. \n\nSee also Warn, Error. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#IterationControl.Warn","page":"Controlling Iterative Models","title":"IterationControl.Warn","text":"Warn(predicate; f=\"\")\n\nAn iteration control, as in, Warn(m -> length(m.cache) > 100, f=\"Memory low\"). \n\nIf predicate(m) is true, then log to Warn the value of f (or f(IterationControl.expose(m)) if f is a function). Here m is the object being iterated.\n\nCan be suppressed by setting the global verbosity level sufficiently low.\n\nSee also Info, Error. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#IterationControl.Error","page":"Controlling Iterative Models","title":"IterationControl.Error","text":"Error(predicate; f=\"\", exception=nothing))\n\nAn iteration control, as in, Error(m -> isnan(m.bias), f=\"Bias overflow!\"). \n\nIf predicate(m) is true, then log at the Error level the value of f (or f(IterationControl.expose(m)) if f is a function) and stop iteration at the end of the current control cycle. Here m is the object being iterated.\n\nSpecify exception=... to throw an immediate execption, without waiting to the end of the control cycle.\n\nSee also Info, Warn. 
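\n\nFor illustration, a sketch showing where such controls fit into an IteratedModel controls list (the predicates are arbitrary and assume the atomic model has an eta field, as for the EvoTreeClassifier used elsewhere in this manual):\n\ncontrols = [Step(5),\n InvalidValue(),\n Warn(mach -> mach.model.eta < 1e-4, f=\"Very small learning rate; training may be slow\"),\n Error(mach -> mach.model.eta <= 0, f=\"Non-positive learning rate\"),\n NumberLimit(100)]\n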
\n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#IterationControl.Callback","page":"Controlling Iterative Models","title":"IterationControl.Callback","text":"Callback(f=_->nothing, stop_if_true=false, stop_message=nothing, raw=false)\n\nAn iteration control, as in, Callback(m->put!(v, my_loss_function(m)). \n\nCall f(IterationControl.expose(m)), where m is the object being iterated, unless raw=true, in which case call f(m) (guaranteed if expose has not been overloaded.) If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#IterationControl.WithNumberDo","page":"Controlling Iterative Models","title":"IterationControl.WithNumberDo","text":"WithNumberDo(f=n->@info(\"number: $n\"), stop_if_true=false, stop_message=nothing)\n\nAn iteration control, as in, WithNumberDo(n->put!(my_channel, n)). \n\nCall f(n + 1), where n is the number of complete control cycles. of the control (so, n = 1, 2, 3, ..., unless control is wrapped in a IterationControl.skip)`.\n\nIf stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#MLJIteration.WithIterationsDo","page":"Controlling Iterative Models","title":"MLJIteration.WithIterationsDo","text":"WithIterationsDo(f=x->@info(\"iterations: $x\"), stop_if_true=false, stop_message=nothing)\n\nAn iteration control, as in, WithIterationsDo(x->put!(my_channel, x)). \n\nCall f(x), where x is the current number of model iterations (generally more than the number of control cycles). If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#IterationControl.WithLossDo","page":"Controlling Iterative Models","title":"IterationControl.WithLossDo","text":"WithLossDo(f=x->@info(\"loss: $x\"), stop_if_true=false, stop_message=nothing)\n\nAn iteration control, as in, WithLossDo(x->put!(my_losses, x)). \n\nCall f(loss), where loss is current loss.\n\nIf stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#IterationControl.WithTrainingLossesDo","page":"Controlling Iterative Models","title":"IterationControl.WithTrainingLossesDo","text":"WithTrainingLossesDo(f=v->@info(\"training: $v\"), stop_if_true=false, stop_message=nothing)\n\nAn iteration control, as in, WithTrainingLossesDo(v->put!(my_losses, last(v)). \n\nCall f(training_losses), where training_losses is the vector of most recent batch of training losses.\n\nIf stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#MLJIteration.WithEvaluationDo","page":"Controlling Iterative Models","title":"MLJIteration.WithEvaluationDo","text":"WithEvaluationDo(f=x->@info(\"evaluation: $x\"), stop_if_true=false, stop_message=nothing)\n\nAn iteration control, as in, WithEvaluationDo(x->put!(my_channel, x)). \n\nCall f(x), where x is the latest performance evaluation, as returned by evaluate!(train_mach, resampling=..., ...). Not valid if resampling=nothing. 
If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#MLJIteration.WithFittedParamsDo","page":"Controlling Iterative Models","title":"MLJIteration.WithFittedParamsDo","text":"WithFittedParamsDo(f=x->@info(\"fitted_params: $x\"), stop_if_true=false, stop_message=nothing)\n\nAn iteration control, as in, WithFittedParamsDo(x->put!(my_channel, x)). \n\nCall f(x), where x = fitted_params(mach) is the fitted parameters of the training machine, mach, in its current state. If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#MLJIteration.WithReportDo","page":"Controlling Iterative Models","title":"MLJIteration.WithReportDo","text":"WithReportDo(f=x->@info(\"report: $x\"), stop_if_true=false, stop_message=nothing)\n\nAn iteration control, as in, WithReportDo(x->put!(my_channel, x)). \n\nCall f(x), where x = report(mach) is the report associated with the training machine, mach, in its current state. If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#MLJIteration.WithModelDo","page":"Controlling Iterative Models","title":"MLJIteration.WithModelDo","text":"WithModelDo(f=x->@info(\"model: $x\"), stop_if_true=false, stop_message=nothing)\n\nAn iteration control, as in, WithModelDo(x->put!(my_channel, x)). \n\nCall f(x), where x is the model associated with the training machine; f may mutate x, as in f(x) = (x.learning_rate *= 0.9). If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#MLJIteration.WithMachineDo","page":"Controlling Iterative Models","title":"MLJIteration.WithMachineDo","text":"WithMachineDo(f=x->@info(\"machine: $x\"), stop_if_true=false, stop_message=nothing)\n\nAn iteration control, as in, WithMachineDo(x->put!(my_channel, x)). \n\nCall f(x), where x is the training machine in its current state. If stop_if_true is true, then trigger an early stop if the value returned by f is true, logging the stop_message if specified. \n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#MLJIteration.Save","page":"Controlling Iterative Models","title":"MLJIteration.Save","text":"Save(filename=\"machine.jls\")\n\nAn iteration control, as in, Save(\"run3/machine.jls\"). \n\nSave the current state of the machine being iterated to disk, using the provided filename, decorated with a number, as in \"run3/machine42.jls\". The default behaviour uses the Serialization module but this can be changed by setting the method=save_fn(::String, ::Any) argument where save_fn is any serialization method. 
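\n\nFor illustration, to save a snapshot only every 10 control cycles, one might combine Save with the skip wrapper (a sketch; the filename and frequency are arbitrary):\n\n# import IterationControl   # or use MLJ.IterationControl\ncontrols = [Step(1),\n IterationControl.skip(Save(\"run3/machine.jls\"), predicate=10),\n NumberLimit(100)]\n\n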
For more on what is meant by \"the machine being iterated\", see IteratedModel.\n\n\n\n\n\n","category":"type"},{"location":"controlling_iterative_models/#Control-wrappers","page":"Controlling Iterative Models","title":"Control wrappers","text":"","category":"section"},{"location":"controlling_iterative_models/","page":"Controlling Iterative Models","title":"Controlling Iterative Models","text":"IterationControl.skip\nIterationControl.louder\nIterationControl.with_state_do\nIterationControl.composite","category":"page"},{"location":"controlling_iterative_models/#IterationControl.skip","page":"Controlling Iterative Models","title":"IterationControl.skip","text":"IterationControl.skip(control, predicate=1)\n\nAn iteration control wrapper.\n\nIf predicate is an integer, k: Apply control on every k calls to apply the wrapped control, starting with the kth call.\n\nIf predicate is a function: Apply control as usual when predicate(n + 1) is true but otherwise skip. Here n is the number of control cycles applied so far.\n\n\n\n\n\n","category":"function"},{"location":"controlling_iterative_models/#IterationControl.louder","page":"Controlling Iterative Models","title":"IterationControl.louder","text":"IterationControl.louder(control, by=1)\n\nWrap control to make in more (or less) verbose. The same as control, but as if the global verbosity were increased by the value by.\n\n\n\n\n\n","category":"function"},{"location":"controlling_iterative_models/#IterationControl.with_state_do","page":"Controlling Iterative Models","title":"IterationControl.with_state_do","text":"IterationControl.with_state_do(control,\n f=x->@info \"$(typeof(control)) state: $x\")\n\nWrap control to give access to it's internal state. Acts exactly like control except that f is called on the internal state of control. If f is not specified, the control type and state are logged to Info at every update (useful for debugging new controls).\n\nWarning. 
The internal state of a control is not yet considered part of the public interface and could change between in any pre 1.0 release of IterationControl.jl.\n\n\n\n\n\n","category":"function"},{"location":"controlling_iterative_models/#IterationControl.composite","page":"Controlling Iterative Models","title":"IterationControl.composite","text":"composite(controls...)\n\nConstruct an iteration control that applies the specified controls in sequence.\n\n\n\n\n\n","category":"function"},{"location":"models/SODDetector_OutlierDetectionPython/#SODDetector_OutlierDetectionPython","page":"SODDetector","title":"SODDetector","text":"","category":"section"},{"location":"models/SODDetector_OutlierDetectionPython/","page":"SODDetector","title":"SODDetector","text":"SODDetector(n_neighbors = 5,\n ref_set = 10,\n alpha = 0.8)","category":"page"},{"location":"models/SODDetector_OutlierDetectionPython/","page":"SODDetector","title":"SODDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.sod","category":"page"},{"location":"models/RandomUndersampler_Imbalance/#RandomUndersampler_Imbalance","page":"RandomUndersampler","title":"RandomUndersampler","text":"","category":"section"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"Initiate a random undersampling model with the given hyper-parameters.","category":"page"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"RandomUndersampler","category":"page"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"A model type for constructing a random undersampler, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"RandomUndersampler = @load RandomUndersampler pkg=Imbalance","category":"page"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"Do model = RandomUndersampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomUndersampler(ratios=...).","category":"page"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"RandomUndersampler implements naive undersampling by randomly removing existing observations. ","category":"page"},{"location":"models/RandomUndersampler_Imbalance/#Training-data","page":"RandomUndersampler","title":"Training data","text":"","category":"section"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)","category":"page"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"Likewise, there is no need to fit!(mach). 
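To compose the undersampler with a classifier in a single supervised model, one option is the BalancedModel wrapper from MLJBalancing.jl, as in the following sketch (this assumes MLJBalancing.jl is installed and a probabilistic classifier such as LogisticClassifier is available; see Correcting Class Imbalance for details):\n\nusing MLJ, MLJBalancing\nLogisticClassifier = @load LogisticClassifier pkg=MLJLinearModels verbosity=0\nbalanced_classifier = BalancedModel(model=LogisticClassifier(),\n balancer1=RandomUndersampler(ratios=1.0, rng=42))\nmach_balanced = machine(balanced_classifier, X, y) |> fit!\n\n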
","category":"page"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"For default values of the hyper-parameters, model can be constructed by model = RandomUndersampler()","category":"page"},{"location":"models/RandomUndersampler_Imbalance/#Hyperparameters","page":"RandomUndersampler","title":"Hyperparameters","text":"","category":"section"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"ratios=1.0: A parameter that controls the amount of undersampling to be done for each class\nCan be a float and in this case each class will be undersampled to the size of the minority class times the float. By default, all classes are undersampled to the size of the minority class\nCan be a dictionary mapping each class label to the float ratio for that class\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister`.","category":"page"},{"location":"models/RandomUndersampler_Imbalance/#Transform-Inputs","page":"RandomUndersampler","title":"Transform Inputs","text":"","category":"section"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"X: A matrix of real numbers or a table with element scitypes that subtype Union{Finite, Infinite}. Elements in nominal columns should subtype Finite (i.e., have scitype OrderedFactor or Multiclass) and elements in continuous columns should subtype Infinite (i.e., have scitype Count or Continuous).\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/RandomUndersampler_Imbalance/#Transform-Outputs","page":"RandomUndersampler","title":"Transform Outputs","text":"","category":"section"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"X_under: A matrix or table that includes the data after undersampling depending on whether the input X is a matrix or table respectively\ny_under: An abstract vector of labels corresponding to X_under","category":"page"},{"location":"models/RandomUndersampler_Imbalance/#Operations","page":"RandomUndersampler","title":"Operations","text":"","category":"section"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"transform(mach, X, y): resample the data X and y using RandomUndersampler, returning both the new and original observations","category":"page"},{"location":"models/RandomUndersampler_Imbalance/#Example","page":"RandomUndersampler","title":"Example","text":"","category":"section"},{"location":"models/RandomUndersampler_Imbalance/","page":"RandomUndersampler","title":"RandomUndersampler","text":"using MLJ\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows, num_continuous_feats = 100, 5\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n class_probs, rng=42) \n\njulia> Imbalance.checkbalance(y; ref=\"minority\")\n 1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) \n 2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (173.7%) \n 0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (252.6%) \n\n## load RandomUndersampler\nRandomUndersampler = @load RandomUndersampler pkg=Imbalance\n\n## wrap the model in a 
machine\nundersampler = RandomUndersampler(ratios=Dict(0=>1.0, 1=> 1.0, 2=>1.0), \n rng=42)\nmach = machine(undersampler)\n\n## provide the data to transform (there is nothing to fit)\nX_under, y_under = transform(mach, X, y)\n \njulia> Imbalance.checkbalance(y_under; ref=\"minority\")\n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) ","category":"page"},{"location":"models/RandomForestRegressor_MLJScikitLearnInterface/#RandomForestRegressor_MLJScikitLearnInterface","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"","category":"section"},{"location":"models/RandomForestRegressor_MLJScikitLearnInterface/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"RandomForestRegressor","category":"page"},{"location":"models/RandomForestRegressor_MLJScikitLearnInterface/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"A model type for constructing a random forest regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RandomForestRegressor_MLJScikitLearnInterface/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RandomForestRegressor_MLJScikitLearnInterface/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"RandomForestRegressor = @load RandomForestRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/RandomForestRegressor_MLJScikitLearnInterface/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"Do model = RandomForestRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomForestRegressor(n_estimators=...).","category":"page"},{"location":"models/RandomForestRegressor_MLJScikitLearnInterface/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"A random forest is a meta estimator that fits a number of classifying decision trees on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting. The sub-sample size is controlled with the max_samples parameter if bootstrap=True (default), otherwise the whole dataset is used to build each tree.","category":"page"},{"location":"models/FillImputer_MLJModels/#FillImputer_MLJModels","page":"FillImputer","title":"FillImputer","text":"","category":"section"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"FillImputer","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"A model type for constructing a fill imputer, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"FillImputer = @load FillImputer pkg=MLJModels","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"Do model = FillImputer() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in FillImputer(features=...).","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"Use this model to impute missing values in tabular data. A fixed \"filler\" value is learned from the training data, one for each column of the table.","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"For imputing missing values in a vector, use UnivariateFillImputer instead.","category":"page"},{"location":"models/FillImputer_MLJModels/#Training-data","page":"FillImputer","title":"Training data","text":"","category":"section"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"mach = machine(model, X)","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"where","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"X: any table of input features (eg, a DataFrame) whose columns each have element scitypes Union{Missing, T}, where T is a subtype of Continuous, Multiclass, OrderedFactor or Count. Check scitypes with schema(X).","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/FillImputer_MLJModels/#Hyper-parameters","page":"FillImputer","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"features: a vector of names of features (symbols) for which imputation is to be attempted; default is empty, which is interpreted as \"impute all\".\ncontinuous_fill: function or other callable to determine value to be imputed in the case of Continuous (abstract float) data; default is to apply median after skipping missing values\ncount_fill: function or other callable to determine value to be imputed in the case of Count (integer) data; default is to apply rounded median after skipping missing values\nfinite_fill: function or other callable to determine value to be imputed in the case of Multiclass or OrderedFactor data (categorical vectors); default is to apply mode after skipping missing values","category":"page"},{"location":"models/FillImputer_MLJModels/#Operations","page":"FillImputer","title":"Operations","text":"","category":"section"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"transform(mach, Xnew): return Xnew with missing values imputed with the fill values learned when fitting mach","category":"page"},{"location":"models/FillImputer_MLJModels/#Fitted-parameters","page":"FillImputer","title":"Fitted parameters","text":"","category":"section"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"features_seen_in_fit: the names of features (columns) encountered during training\nunivariate_transformer: the univariate model applied to determine the fillers (it's fields contain the functions defining the filler 
computations)\nfiller_given_feature: dictionary of filler values, keyed on feature (column) names","category":"page"},{"location":"models/FillImputer_MLJModels/#Examples","page":"FillImputer","title":"Examples","text":"","category":"section"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"using MLJ\nimputer = FillImputer()\n\nX = (a = [1.0, 2.0, missing, 3.0, missing],\n b = coerce([\"y\", \"n\", \"y\", missing, \"y\"], Multiclass),\n c = [1, 1, 2, missing, 3])\n\nschema(X)\njulia> schema(X)\n┌───────┬───────────────────────────────┐\n│ names │ scitypes │\n├───────┼───────────────────────────────┤\n│ a │ Union{Missing, Continuous} │\n│ b │ Union{Missing, Multiclass{2}} │\n│ c │ Union{Missing, Count} │\n└───────┴───────────────────────────────┘\n\nmach = machine(imputer, X)\nfit!(mach)\n\njulia> fitted_params(mach).filler_given_feature\n(filler = 2.0,)\n\njulia> fitted_params(mach).filler_given_feature\nDict{Symbol, Any} with 3 entries:\n :a => 2.0\n :b => \"y\"\n :c => 2\n\njulia> transform(mach, X)\n(a = [1.0, 2.0, 2.0, 3.0, 2.0],\n b = CategoricalValue{String, UInt32}[\"y\", \"n\", \"y\", \"y\", \"y\"],\n c = [1, 1, 2, 2, 3],)","category":"page"},{"location":"models/FillImputer_MLJModels/","page":"FillImputer","title":"FillImputer","text":"See also UnivariateFillImputer.","category":"page"},{"location":"composing_models/#Composing-Models","page":"Composing Models","title":"Composing Models","text":"","category":"section"},{"location":"composing_models/","page":"Composing Models","title":"Composing Models","text":"Three common ways of combining multiple models together have out-of-the-box implementations in MLJ:","category":"page"},{"location":"composing_models/","page":"Composing Models","title":"Composing Models","text":"Linear Pipelines (Pipeline)- for unbranching chains that take the output of one model (e.g., dimension reduction, such as PCA) and make it the input of the next model in the chain (e.g., a classification model, such as EvoTreeClassifier). To include transformations of the target variable in a supervised pipeline model, see Target Transformations.\nHomogeneous Ensembles (EnsembleModel) - for blending the predictions of multiple supervised models all of the same type, but which receive different views of the training data to reduce overall variance. The technique implemented here is known as observation bagging. 
\nModel Stacking - (Stack) for combining the predictions of a smaller number of models of possibly different types, with the help of an adjudicating model.","category":"page"},{"location":"composing_models/","page":"Composing Models","title":"Composing Models","text":"Additionally, more complicated model compositions are possible using:","category":"page"},{"location":"composing_models/","page":"Composing Models","title":"Composing Models","text":"Learning Networks - \"blueprints\" for combining models in flexible ways; these are simple transformations of your existing workflows which can be \"exported\" to define new, stand-alone model types.","category":"page"},{"location":"models/OPTICS_MLJScikitLearnInterface/#OPTICS_MLJScikitLearnInterface","page":"OPTICS","title":"OPTICS","text":"","category":"section"},{"location":"models/OPTICS_MLJScikitLearnInterface/","page":"OPTICS","title":"OPTICS","text":"OPTICS","category":"page"},{"location":"models/OPTICS_MLJScikitLearnInterface/","page":"OPTICS","title":"OPTICS","text":"A model type for constructing an OPTICS clustering model, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/OPTICS_MLJScikitLearnInterface/","page":"OPTICS","title":"OPTICS","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/OPTICS_MLJScikitLearnInterface/","page":"OPTICS","title":"OPTICS","text":"OPTICS = @load OPTICS pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/OPTICS_MLJScikitLearnInterface/","page":"OPTICS","title":"OPTICS","text":"Do model = OPTICS() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OPTICS(min_samples=...).","category":"page"},{"location":"models/OPTICS_MLJScikitLearnInterface/","page":"OPTICS","title":"OPTICS","text":"OPTICS (Ordering Points To Identify the Clustering Structure), closely related to DBSCAN, finds core samples of high density and expands clusters from them. Unlike DBSCAN, it keeps the cluster hierarchy for a variable neighborhood radius, and it is better suited than the current sklearn implementation of DBSCAN for use on large datasets.","category":"page"},{"location":"models/OneHotEncoder_MLJModels/#OneHotEncoder_MLJModels","page":"OneHotEncoder","title":"OneHotEncoder","text":"","category":"section"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"OneHotEncoder","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"A model type for constructing a one-hot encoder, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"OneHotEncoder = @load OneHotEncoder pkg=MLJModels","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"Do model = OneHotEncoder() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in OneHotEncoder(features=...).","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"Use this model to one-hot encode the Multiclass and OrderedFactor features (columns) of some table, leaving other columns unchanged.","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"New data to be transformed may lack features present in the fit data, but no new features can be present.","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"Warning: This transformer assumes that levels(col) for any Multiclass or OrderedFactor column, col, is the same for training data and new data to be transformed.","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"To ensure all features are transformed into Continuous features, or dropped, use ContinuousEncoder instead.","category":"page"},{"location":"models/OneHotEncoder_MLJModels/#Training-data","page":"OneHotEncoder","title":"Training data","text":"","category":"section"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"mach = machine(model, X)","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"where","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"X: any Tables.jl compatible table. Columns can be of mixed type but only those with element scitype Multiclass or OrderedFactor can be encoded. Check column scitypes with schema(X).","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/OneHotEncoder_MLJModels/#Hyper-parameters","page":"OneHotEncoder","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"features: a vector of symbols (column names). If empty (default) then all Multiclass and OrderedFactor features are encoded. Otherwise, encoding is further restricted to the specified features (ignore=false) or the unspecified features (ignore=true). This default behavior can be modified by the ordered_factor flag.\nordered_factor=false: when true, OrderedFactor features are universally excluded\ndrop_last=true: whether to drop the column corresponding to the final class of encoded features. 
For example, a three-class feature is spawned into three new features if drop_last=false, but just two features otherwise.","category":"page"},{"location":"models/OneHotEncoder_MLJModels/#Fitted-parameters","page":"OneHotEncoder","title":"Fitted parameters","text":"","category":"section"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"all_features: names of all features encountered in training\nfitted_levels_given_feature: dictionary of the levels associated with each feature encoded, keyed on the feature name\nref_name_pairs_given_feature: dictionary of pairs r => ftr (such as 0x00000001 => :grad__A) where r is a CategoricalArrays.jl reference integer representing a level, and ftr the corresponding new feature name; the dictionary is keyed on the names of features that are encoded","category":"page"},{"location":"models/OneHotEncoder_MLJModels/#Report","page":"OneHotEncoder","title":"Report","text":"","category":"section"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"The fields of report(mach) are:","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"features_to_be_encoded: names of input features to be encoded\nnew_features: names of all output features","category":"page"},{"location":"models/OneHotEncoder_MLJModels/#Example","page":"OneHotEncoder","title":"Example","text":"","category":"section"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"using MLJ\n\nX = (name=categorical([\"Danesh\", \"Lee\", \"Mary\", \"John\"]),\n grade=categorical([\"A\", \"B\", \"A\", \"C\"], ordered=true),\n height=[1.85, 1.67, 1.5, 1.67],\n n_devices=[3, 2, 4, 3])\n\njulia> schema(X)\n┌───────────┬──────────────────┐\n│ names │ scitypes │\n├───────────┼──────────────────┤\n│ name │ Multiclass{4} │\n│ grade │ OrderedFactor{3} │\n│ height │ Continuous │\n│ n_devices │ Count │\n└───────────┴──────────────────┘\n\nhot = OneHotEncoder(drop_last=true)\nmach = fit!(machine(hot, X))\nW = transform(mach, X)\n\njulia> schema(W)\n┌──────────────┬────────────┐\n│ names │ scitypes │\n├──────────────┼────────────┤\n│ name__Danesh │ Continuous │\n│ name__John │ Continuous │\n│ name__Lee │ Continuous │\n│ grade__A │ Continuous │\n│ grade__B │ Continuous │\n│ height │ Continuous │\n│ n_devices │ Count │\n└──────────────┴────────────┘","category":"page"},{"location":"models/OneHotEncoder_MLJModels/","page":"OneHotEncoder","title":"OneHotEncoder","text":"See also ContinuousEncoder.","category":"page"},{"location":"internals/#internals_section","page":"Internals","title":"Internals","text":"","category":"section"},{"location":"internals/#The-machine-interface,-simplified","page":"Internals","title":"The machine interface, simplified","text":"","category":"section"},{"location":"internals/","page":"Internals","title":"Internals","text":"The following is a simplified description of the Machine interface. It predates the introduction of an optional data front-end for models (see Implementing a data front-end). 
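In rough outline, a machine couples a model (a container for hyper-parameters) with training data and a slot for the learned parameters. The sketch below is illustrative only: the type name, field names, and constructor are simplified assumptions for exposition, not the actual MLJBase definition.

## Illustrative sketch only (simplified assumptions; not the actual MLJBase code):
mutable struct SimpleMachine{M}
    model::M       ## hyper-parameter container, e.g. a classifier or transformer instance
    args::Tuple    ## training data, e.g. (X, y) for supervised models
    fitresult      ## learned parameters, populated when the machine is trained
    report         ## auxiliary byproducts of training
    SimpleMachine(model::M, args...) where M = new{M}(model, args)
end

## Training such a machine would amount, roughly, to calling the model's fit method
## and storing the result:
##     mach.fitresult, _, mach.report = fit(mach.model, verbosity, mach.args...)
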
See also the Glossary","category":"page"},{"location":"internals/#The-Machine-type","page":"Internals","title":"The Machine type","text":"","category":"section"},{"location":"internals/","page":"Internals","title":"Internals","text":"mutable struct Machine{M fit!\n\nXnew, _ = make_regression(3, 9)\nyhat = predict(mach, Xnew) ## new predictions","category":"page"},{"location":"models/MultitargetLinearRegressor_MultivariateStats/","page":"MultitargetLinearRegressor","title":"MultitargetLinearRegressor","text":"See also LinearRegressor, RidgeRegressor, MultitargetRidgeRegressor","category":"page"},{"location":"models/CDDetector_OutlierDetectionPython/#CDDetector_OutlierDetectionPython","page":"CDDetector","title":"CDDetector","text":"","category":"section"},{"location":"models/CDDetector_OutlierDetectionPython/","page":"CDDetector","title":"CDDetector","text":"CDDetector(whitening = true,\n rule_of_thumb = false)","category":"page"},{"location":"models/CDDetector_OutlierDetectionPython/","page":"CDDetector","title":"CDDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.cd","category":"page"},{"location":"models/ConstantRegressor_MLJModels/#ConstantRegressor_MLJModels","page":"ConstantRegressor","title":"ConstantRegressor","text":"","category":"section"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"ConstantRegressor","category":"page"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"This \"dummy\" probabilistic predictor always returns the same distribution, irrespective of the provided input pattern. The distribution returned is the one of the type specified that best fits the training target data. Use predict_mean or predict_median to predict the mean or median values instead. If not specified, a normal distribution is fit.","category":"page"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"Almost any reasonable model is expected to outperform ConstantRegressor which is used almost exclusively for testing and establishing performance baselines.","category":"page"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"In MLJ (or MLJModels) do model = ConstantRegressor() or model = ConstantRegressor(distribution=...) 
to construct a model instance.","category":"page"},{"location":"models/ConstantRegressor_MLJModels/#Training-data","page":"ConstantRegressor","title":"Training data","text":"","category":"section"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"In MLJ (or MLJBase) bind an instance model to data with","category":"page"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"Here:","category":"page"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"X is any table of input features (eg, a DataFrame)\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with schema(y)","category":"page"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/ConstantRegressor_MLJModels/#Hyper-parameters","page":"ConstantRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"distribution_type=Distributions.Normal: The distribution to be fit to the target data. Must be a subtype of Distributions.ContinuousUnivariateDistribution.","category":"page"},{"location":"models/ConstantRegressor_MLJModels/#Operations","page":"ConstantRegressor","title":"Operations","text":"","category":"section"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"predict(mach, Xnew): Return predictions of the target given features Xnew (which for this model are ignored). 
Predictions are probabilistic.\npredict_mean(mach, Xnew): Return instead the means of the probabilistic predictions returned above.\npredict_median(mach, Xnew): Return instead the medians of the probabilistic predictions returned above.","category":"page"},{"location":"models/ConstantRegressor_MLJModels/#Fitted-parameters","page":"ConstantRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"target_distribution: The distribution fit to the supplied target data.","category":"page"},{"location":"models/ConstantRegressor_MLJModels/#Examples","page":"ConstantRegressor","title":"Examples","text":"","category":"section"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"using MLJ\n\nX, y = make_regression(10, 2) ## synthetic data: a table and vector\nregressor = ConstantRegressor()\nmach = machine(regressor, X, y) |> fit!\n\nfitted_params(mach)\n\nXnew, _ = make_regression(3, 2)\npredict(mach, Xnew)\npredict_mean(mach, Xnew)\n","category":"page"},{"location":"models/ConstantRegressor_MLJModels/","page":"ConstantRegressor","title":"ConstantRegressor","text":"See also ConstantClassifier","category":"page"},{"location":"models/ElasticNetRegressor_MLJScikitLearnInterface/#ElasticNetRegressor_MLJScikitLearnInterface","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"","category":"section"},{"location":"models/ElasticNetRegressor_MLJScikitLearnInterface/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"ElasticNetRegressor","category":"page"},{"location":"models/ElasticNetRegressor_MLJScikitLearnInterface/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"A model type for constructing a elastic net regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ElasticNetRegressor_MLJScikitLearnInterface/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ElasticNetRegressor_MLJScikitLearnInterface/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"ElasticNetRegressor = @load ElasticNetRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/ElasticNetRegressor_MLJScikitLearnInterface/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"Do model = ElasticNetRegressor() to construct an instance with default hyper-parameters. 
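A minimal usage sketch follows (illustrative only: the data are synthetic, the hyper-parameter values are arbitrary, and a working Python scikit-learn installation, as required by MLJScikitLearnInterface, is assumed):

using MLJ
ElasticNetRegressor = @load ElasticNetRegressor pkg=MLJScikitLearnInterface
X, y = make_regression(100, 4)          ## synthetic data: a table and a vector
model = ElasticNetRegressor(alpha=0.1, l1_ratio=0.7)
mach = machine(model, X, y) |> fit!
yhat = predict(mach, X)                 ## point predictions (this model is deterministic)
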
Provide keyword arguments to override hyper-parameter defaults, as in ElasticNetRegressor(alpha=...).","category":"page"},{"location":"models/ElasticNetRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"ElasticNetRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ElasticNetRegressor_MLJScikitLearnInterface/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"alpha = 1.0\nl1_ratio = 0.5\nfit_intercept = true\nprecompute = false\nmax_iter = 1000\ncopy_X = true\ntol = 0.0001\nwarm_start = false\npositive = false\nrandom_state = nothing\nselection = cyclic","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/#SubspaceLDA_MultivariateStats","page":"SubspaceLDA","title":"SubspaceLDA","text":"","category":"section"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"SubspaceLDA","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"A model type for constructing a subspace LDA model, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"SubspaceLDA = @load SubspaceLDA pkg=MultivariateStats","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"Do model = SubspaceLDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SubspaceLDA(normalize=...).","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"Multiclass subspace linear discriminant analysis (LDA) is a variation on ordinary LDA suitable for high dimensional data, as it avoids storing scatter matrices. For details, refer to the MultivariateStats.jl documentation.","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"In addition to dimension reduction (using transform), probabilistic classification is provided (using predict). In the case of classification, the class probability for a new observation reflects the proximity of that observation to training observations associated with that class, and how far away the observation is from observations associated with other classes. Specifically, the distances, in the transformed (projected) space, of a new observation from the centroid of each target class are computed; the resulting vector of distances, multiplied by minus one, is passed to a softmax function to obtain a class probability prediction (see the sketch below). 
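The following toy calculation (plain Julia, with made-up distances; no MLJ functionality required) illustrates the softmax step:

d = [2.0, 0.5, 1.0]            ## hypothetical distances to three class centroids
scores = exp.(-d)              ## larger distance means smaller score
probs = scores ./ sum(scores)  ## softmax: class probabilities summing to one
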
Here \"distance\" is computed using a user-specified distance function.","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/#Training-data","page":"SubspaceLDA","title":"Training data","text":"","category":"section"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"Here:","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y).","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/#Hyper-parameters","page":"SubspaceLDA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"normalize=true: Option to normalize the between class variance for the number of observations in each class, one of true or false.\noutdim: the ouput dimension, automatically set to min(indim, nclasses-1) if equal to 0. If a non-zero outdim is passed, then the actual output dimension used is min(rank, outdim) where rank is the rank of the within-class covariance matrix.\ndist=Distances.SqEuclidean(): The distance metric to use when performing classification (to compare the distance between a new point and centroids in the transformed space); must be a subtype of Distances.SemiMetric from Distances.jl, e.g., Distances.CosineDist.","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/#Operations","page":"SubspaceLDA","title":"Operations","text":"","category":"section"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.\npredict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above. 
Predictions are probabilistic but uncalibrated.\npredict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/#Fitted-parameters","page":"SubspaceLDA","title":"Fitted parameters","text":"","category":"section"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"classes: The classes seen during model fitting.\nprojection_matrix: The learned projection matrix, of size (indim, outdim), where indim and outdim are the input and output dimensions respectively (See Report section below).","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/#Report","page":"SubspaceLDA","title":"Report","text":"","category":"section"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"The fields of report(mach) are:","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"indim: The dimension of the input space i.e the number of training features.\noutdim: The dimension of the transformed space the model is projected to.\nmean: The mean of the untransformed training data. A vector of length indim.\nnclasses: The number of classes directly observed in the training data (which can be less than the total number of classes in the class pool)","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"class_means: The class-specific means of the training data. A matrix of size (indim, nclasses) with the ith column being the class-mean of the ith class in classes (See fitted params section above).","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"class_weights: The weights (class counts) of each class. A vector of length nclasses with the ith element being the class weight of the ith class in classes. (See fitted params section above.)\nexplained_variance_ratio: The ratio of explained variance to total variance. Each dimension corresponds to an eigenvalue.","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/#Examples","page":"SubspaceLDA","title":"Examples","text":"","category":"section"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"using MLJ\n\nSubspaceLDA = @load SubspaceLDA pkg=MultivariateStats\n\nX, y = @load_iris ## a table and a vector\n\nmodel = SubspaceLDA()\nmach = machine(model, X, y) |> fit!\n\nXproj = transform(mach, X)\ny_hat = predict(mach, X)\nlabels = predict_mode(mach, X)","category":"page"},{"location":"models/SubspaceLDA_MultivariateStats/","page":"SubspaceLDA","title":"SubspaceLDA","text":"See also LDA, BayesianLDA, BayesianSubspaceLDA","category":"page"},{"location":"generating_synthetic_data/#Generating-Synthetic-Data","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"","category":"section"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"Here synthetic data means artificially generated data, with no reference to a \"real world\" data set. 
Not to be confused with \"fake data\" obtained by resampling from a distribution fit to some actual real data.","category":"page"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"MLJ has a set of functions - make_blobs, make_circles, make_moons and make_regression (closely resembling functions in scikit-learn of the same name) - for generating synthetic data sets. These are useful for testing machine learning models (e.g., testing user-defined composite models; see Composing Models).","category":"page"},{"location":"generating_synthetic_data/#Generating-Gaussian-blobs","page":"Generating Synthetic Data","title":"Generating Gaussian blobs","text":"","category":"section"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"make_blobs","category":"page"},{"location":"generating_synthetic_data/#MLJBase.make_blobs","page":"Generating Synthetic Data","title":"MLJBase.make_blobs","text":"X, y = make_blobs(n=100, p=2; kwargs...)\n\nGenerate Gaussian blobs for clustering and classification problems.\n\nReturn value\n\nBy default, a table X with p columns (features) and n rows (observations), together with a corresponding vector of n Multiclass target observations y, indicating blob membership.\n\nKeyword arguments\n\nshuffle=true: whether to shuffle the resulting points,\ncenters=3: either a number of centers or a c x p matrix with c pre-determined centers,\ncluster_std=1.0: the standard deviation(s) of each blob,\ncenter_box=(-10. => 10.): the limits of the p-dimensional cube within which the cluster centers are drawn if they are not provided,\neltype=Float64: machine type of points (any subtype of AbstractFloat).\nrng=Random.GLOBAL_RNG: any AbstractRNG object, or integer to seed a MersenneTwister (for reproducibility).\nas_table=true: whether to return the points as a table (true) or a matrix (false). If false the target y has integer element type. 
\n\nExample\n\nX, y = make_blobs(100, 3; centers=2, cluster_std=[1.0, 3.0])\n\n\n\n\n\n","category":"function"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"using MLJ, DataFrames\nX, y = make_blobs(100, 3; centers=2, cluster_std=[1.0, 3.0])\ndfBlobs = DataFrame(X)\ndfBlobs.y = y\nfirst(dfBlobs, 3)","category":"page"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"using VegaLite\ndfBlobs |> @vlplot(:point, x=:x1, y=:x2, color = :\"y:n\") ","category":"page"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"(Image: svg)","category":"page"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"dfBlobs |> @vlplot(:point, x=:x1, y=:x3, color = :\"y:n\") ","category":"page"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"(Image: svg)","category":"page"},{"location":"generating_synthetic_data/#Generating-concentric-circles","page":"Generating Synthetic Data","title":"Generating concentric circles","text":"","category":"section"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"make_circles","category":"page"},{"location":"generating_synthetic_data/#MLJBase.make_circles","page":"Generating Synthetic Data","title":"MLJBase.make_circles","text":"X, y = make_circles(n=100; kwargs...)\n\nGenerate n labeled points close to two concentric circles for classification and clustering models.\n\nReturn value\n\nBy default, a table X with 2 columns and n rows (observations), together with a corresponding vector of n Multiclass target observations y. The target is either 0 or 1, corresponding to membership to the smaller or larger circle, respectively.\n\nKeyword arguments\n\nshuffle=true: whether to shuffle the resulting points,\nnoise=0: standard deviation of the Gaussian noise added to the data,\nfactor=0.8: ratio of the smaller radius over the larger one,\n\neltype=Float64: machine type of points (any subtype of AbstractFloat).\nrng=Random.GLOBAL_RNG: any AbstractRNG object, or integer to seed a MersenneTwister (for reproducibility).\nas_table=true: whether to return the points as a table (true) or a matrix (false). If false the target y has integer element type. 
\n\nExample\n\nX, y = make_circles(100; noise=0.5, factor=0.3)\n\n\n\n\n\n","category":"function"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"using MLJ, DataFrames\nX, y = make_circles(100; noise=0.05, factor=0.3)\ndfCircles = DataFrame(X)\ndfCircles.y = y\nfirst(dfCircles, 3)","category":"page"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"using VegaLite\ndfCircles |> @vlplot(:circle, x=:x1, y=:x2, color = :\"y:n\") ","category":"page"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"(Image: svg)","category":"page"},{"location":"generating_synthetic_data/#Sampling-from-two-interleaved-half-circles","page":"Generating Synthetic Data","title":"Sampling from two interleaved half-circles","text":"","category":"section"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"make_moons","category":"page"},{"location":"generating_synthetic_data/#MLJBase.make_moons","page":"Generating Synthetic Data","title":"MLJBase.make_moons","text":" make_moons(n::Int=100; kwargs...)\n\nGenerates labeled two-dimensional points lying close to two interleaved semi-circles, for use with classification and clustering models.\n\nReturn value\n\nBy default, a table X with 2 columns and n rows (observations), together with a corresponding vector of n Multiclass target observations y. The target is either 0 or 1, corresponding to membership to the left or right semi-circle.\n\nKeyword arguments\n\nshuffle=true: whether to shuffle the resulting points,\nnoise=0.1: standard deviation of the Gaussian noise added to the data,\nxshift=1.0: horizontal translation of the second center with respect to the first one.\nyshift=0.3: vertical translation of the second center with respect to the first one. \neltype=Float64: machine type of points (any subtype of AbstractFloat).\nrng=Random.GLOBAL_RNG: any AbstractRNG object, or integer to seed a MersenneTwister (for reproducibility).\nas_table=true: whether to return the points as a table (true) or a matrix (false). If false the target y has integer element type. 
\n\nExample\n\nX, y = make_moons(100; noise=0.5)\n\n\n\n\n\n","category":"function"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"using MLJ, DataFrames\nX, y = make_moons(100; noise=0.05)\ndfHalfCircles = DataFrame(X)\ndfHalfCircles.y = y\nfirst(dfHalfCircles, 3)","category":"page"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"using VegaLite\ndfHalfCircles |> @vlplot(:circle, x=:x1, y=:x2, color = :\"y:n\") ","category":"page"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"(Image: svg)","category":"page"},{"location":"generating_synthetic_data/#Regression-data-generated-from-noisy-linear-models","page":"Generating Synthetic Data","title":"Regression data generated from noisy linear models","text":"","category":"section"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"make_regression","category":"page"},{"location":"generating_synthetic_data/#MLJBase.make_regression","page":"Generating Synthetic Data","title":"MLJBase.make_regression","text":"make_regression(n, p; kwargs...)\n\nGenerate Gaussian input features and a linear response with Gaussian noise, for use with regression models.\n\nReturn value\n\nBy default, a tuple (X, y) where table X has p columns and n rows (observations), together with a corresponding vector of n Continuous target observations y.\n\nKeywords\n\nintercept=true: Whether to generate data from a model with intercept.\nn_targets=1: Number of columns in the target.\nsparse=0: Proportion of the generating weight vector that is sparse.\nnoise=0.1: Standard deviation of the Gaussian noise added to the response (target).\noutliers=0: Proportion of the response vector to make as outliers by adding a random quantity with high variance. (Only applied if binary is false.)\nas_table=true: Whether X (and y, if n_targets > 1) should be a table or a matrix.\neltype=Float64: Element type for X and y. Must subtype AbstractFloat.\nbinary=false: Whether the target should be binarized (via a sigmoid).\neltype=Float64: machine type of points (any subtype of AbstractFloat).\nrng=Random.GLOBAL_RNG: any AbstractRNG object, or integer to seed a MersenneTwister (for reproducibility).\nas_table=true: whether to return the points as a table (true) or a matrix (false). 
\n\nExample\n\nX, y = make_regression(100, 5; noise=0.5, sparse=0.2, outliers=0.1)\n\n\n\n\n\n","category":"function"},{"location":"generating_synthetic_data/","page":"Generating Synthetic Data","title":"Generating Synthetic Data","text":"using MLJ, DataFrames\nX, y = make_regression(100, 5; noise=0.5, sparse=0.2, outliers=0.1)\ndfRegression = DataFrame(X)\ndfRegression.y = y\nfirst(dfRegression, 3)","category":"page"},{"location":"models/FeatureAgglomeration_MLJScikitLearnInterface/#FeatureAgglomeration_MLJScikitLearnInterface","page":"FeatureAgglomeration","title":"FeatureAgglomeration","text":"","category":"section"},{"location":"models/FeatureAgglomeration_MLJScikitLearnInterface/","page":"FeatureAgglomeration","title":"FeatureAgglomeration","text":"FeatureAgglomeration","category":"page"},{"location":"models/FeatureAgglomeration_MLJScikitLearnInterface/","page":"FeatureAgglomeration","title":"FeatureAgglomeration","text":"A model type for constructing a feature agglomeration, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/FeatureAgglomeration_MLJScikitLearnInterface/","page":"FeatureAgglomeration","title":"FeatureAgglomeration","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/FeatureAgglomeration_MLJScikitLearnInterface/","page":"FeatureAgglomeration","title":"FeatureAgglomeration","text":"FeatureAgglomeration = @load FeatureAgglomeration pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/FeatureAgglomeration_MLJScikitLearnInterface/","page":"FeatureAgglomeration","title":"FeatureAgglomeration","text":"Do model = FeatureAgglomeration() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in FeatureAgglomeration(n_clusters=...).","category":"page"},{"location":"models/FeatureAgglomeration_MLJScikitLearnInterface/","page":"FeatureAgglomeration","title":"FeatureAgglomeration","text":"Similar to AgglomerativeClustering, but recursively merges features instead of samples.\"","category":"page"},{"location":"models/SVMRegressor_MLJScikitLearnInterface/#SVMRegressor_MLJScikitLearnInterface","page":"SVMRegressor","title":"SVMRegressor","text":"","category":"section"},{"location":"models/SVMRegressor_MLJScikitLearnInterface/","page":"SVMRegressor","title":"SVMRegressor","text":"SVMRegressor","category":"page"},{"location":"models/SVMRegressor_MLJScikitLearnInterface/","page":"SVMRegressor","title":"SVMRegressor","text":"A model type for constructing a epsilon-support vector regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SVMRegressor_MLJScikitLearnInterface/","page":"SVMRegressor","title":"SVMRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SVMRegressor_MLJScikitLearnInterface/","page":"SVMRegressor","title":"SVMRegressor","text":"SVMRegressor = @load SVMRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/SVMRegressor_MLJScikitLearnInterface/","page":"SVMRegressor","title":"SVMRegressor","text":"Do model = SVMRegressor() to construct an instance with default hyper-parameters. 
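A minimal usage sketch follows (illustrative only: the data are synthetic, the hyper-parameter values are arbitrary, and a working scikit-learn backend, as required by MLJScikitLearnInterface, is assumed):

using MLJ
SVMRegressor = @load SVMRegressor pkg=MLJScikitLearnInterface
X, y = make_regression(100, 3)                      ## synthetic table and target vector
model = SVMRegressor(kernel="rbf", C=10.0, epsilon=0.05)
mach = machine(model, X, y) |> fit!
yhat = predict(mach, X)                             ## point predictions
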
Provide keyword arguments to override hyper-parameter defaults, as in SVMRegressor(kernel=...).","category":"page"},{"location":"models/SVMRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"SVMRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SVMRegressor_MLJScikitLearnInterface/","page":"SVMRegressor","title":"SVMRegressor","text":"kernel = rbf\ndegree = 3\ngamma = scale\ncoef0 = 0.0\ntol = 0.001\nC = 1.0\nepsilon = 0.1\nshrinking = true\ncache_size = 200\nmax_iter = -1","category":"page"},{"location":"models/SimpleImputer_BetaML/#SimpleImputer_BetaML","page":"SimpleImputer","title":"SimpleImputer","text":"","category":"section"},{"location":"models/SimpleImputer_BetaML/","page":"SimpleImputer","title":"SimpleImputer","text":"mutable struct SimpleImputer <: MLJModelInterface.Unsupervised","category":"page"},{"location":"models/SimpleImputer_BetaML/","page":"SimpleImputer","title":"SimpleImputer","text":"Impute missing values using feature (column) mean, with optional record normalisation (using l-norm norms), from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/SimpleImputer_BetaML/#Hyperparameters:","page":"SimpleImputer","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/SimpleImputer_BetaML/","page":"SimpleImputer","title":"SimpleImputer","text":"statistic::Function: The descriptive statistic of the column (feature) to use as imputed value [def: mean]\nnorm::Union{Nothing, Int64}: Normalise the feature mean by l-norm norm of the records [default: nothing]. Use it (e.g. norm=1 to use the l-1 norm) if the records are highly heterogeneus (e.g. quantity exports of different countries).","category":"page"},{"location":"models/SimpleImputer_BetaML/#Example:","page":"SimpleImputer","title":"Example:","text":"","category":"section"},{"location":"models/SimpleImputer_BetaML/","page":"SimpleImputer","title":"SimpleImputer","text":"julia> using MLJ\n\njulia> X = [1 10.5;1.5 missing; 1.8 8; 1.7 15; 3.2 40; missing missing; 3.3 38; missing -2.3; 5.2 -2.4] |> table ;\n\njulia> modelType = @load SimpleImputer pkg = \"BetaML\" verbosity=0\nBetaML.Imputation.SimpleImputer\n\njulia> model = modelType(norm=1)\nSimpleImputer(\n statistic = Statistics.mean, \n norm = 1)\n\njulia> mach = machine(model, X);\n\njulia> fit!(mach);\n[ Info: Training machine(SimpleImputer(statistic = mean, …), …).\n\njulia> X_full = transform(mach) |> MLJ.matrix\n9×2 Matrix{Float64}:\n 1.0 10.5\n 1.5 0.295466\n 1.8 8.0\n 1.7 15.0\n 3.2 40.0\n 0.280952 1.69524\n 3.3 38.0\n 0.0750839 -2.3\n 5.2 -2.4","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/#UnivariateDiscretizer_MLJModels","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"","category":"section"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"UnivariateDiscretizer","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"A model type for constructing a single variable discretizer, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"From MLJ, the type can be imported 
using","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"UnivariateDiscretizer = @load UnivariateDiscretizer pkg=MLJModels","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"Do model = UnivariateDiscretizer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateDiscretizer(n_classes=...).","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"Discretization converts a Continuous vector into an OrderedFactor vector. In particular, the output is a CategoricalVector (whose reference type is optimized).","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"The transformation is chosen so that the vector on which the transformer is fit has, in transformed form, an approximately uniform distribution of values. Specifically, if n_classes is the level of discretization, then 2*n_classes - 1 ordered quantiles are computed, the odd quantiles being used for transforming (discretization) and the even quantiles for inverse transforming.","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/#Training-data","page":"UnivariateDiscretizer","title":"Training data","text":"","category":"section"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"mach = machine(model, x)","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"where","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"x: any abstract vector with Continuous element scitype; check scitype with scitype(x).","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/#Hyper-parameters","page":"UnivariateDiscretizer","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"n_classes: number of discrete classes in the output","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/#Operations","page":"UnivariateDiscretizer","title":"Operations","text":"","category":"section"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"transform(mach, xnew): discretize xnew according to the discretization learned when fitting mach\ninverse_transform(mach, z): attempt to reconstruct from z a vector that transforms to give z","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/#Fitted-parameters","page":"UnivariateDiscretizer","title":"Fitted 
parameters","text":"","category":"section"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"The fields of fitted_params(mach).fitesult include:","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"odd_quantiles: quantiles used for transforming (length is n_classes - 1)\neven_quantiles: quantiles used for inverse transforming (length is n_classes)","category":"page"},{"location":"models/UnivariateDiscretizer_MLJModels/#Example","page":"UnivariateDiscretizer","title":"Example","text":"","category":"section"},{"location":"models/UnivariateDiscretizer_MLJModels/","page":"UnivariateDiscretizer","title":"UnivariateDiscretizer","text":"using MLJ\nusing Random\nRandom.seed!(123)\n\ndiscretizer = UnivariateDiscretizer(n_classes=100)\nmach = machine(discretizer, randn(1000))\nfit!(mach)\n\njulia> x = rand(5)\n5-element Vector{Float64}:\n 0.8585244609846809\n 0.37541692370451396\n 0.6767070590395461\n 0.9208844241267105\n 0.7064611415680901\n\njulia> z = transform(mach, x)\n5-element CategoricalArrays.CategoricalArray{UInt8,1,UInt8}:\n 0x52\n 0x42\n 0x4d\n 0x54\n 0x4e\n\nx_approx = inverse_transform(mach, z)\njulia> x - x_approx\n5-element Vector{Float64}:\n 0.008224506144777322\n 0.012731354778359405\n 0.0056265330571125816\n 0.005738175684445124\n 0.006835652575801987","category":"page"},{"location":"models/GaussianNBClassifier_MLJScikitLearnInterface/#GaussianNBClassifier_MLJScikitLearnInterface","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"","category":"section"},{"location":"models/GaussianNBClassifier_MLJScikitLearnInterface/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"GaussianNBClassifier","category":"page"},{"location":"models/GaussianNBClassifier_MLJScikitLearnInterface/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"A model type for constructing a Gaussian naive Bayes classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/GaussianNBClassifier_MLJScikitLearnInterface/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/GaussianNBClassifier_MLJScikitLearnInterface/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"GaussianNBClassifier = @load GaussianNBClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/GaussianNBClassifier_MLJScikitLearnInterface/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"Do model = GaussianNBClassifier() to construct an instance with default hyper-parameters. 
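A minimal usage sketch follows (illustrative only; it uses the built-in iris data and assumes the scikit-learn backend required by MLJScikitLearnInterface is available):

using MLJ
GaussianNBClassifier = @load GaussianNBClassifier pkg=MLJScikitLearnInterface
X, y = @load_iris                            ## a table and a categorical target vector
mach = machine(GaussianNBClassifier(), X, y) |> fit!
yhat = predict(mach, X)                      ## probabilistic predictions
predict_mode(mach, X)                        ## most likely class for each observation
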
Provide keyword arguments to override hyper-parameter defaults, as in GaussianNBClassifier(priors=...).","category":"page"},{"location":"models/GaussianNBClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"GaussianNBClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/GaussianNBClassifier_MLJScikitLearnInterface/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"priors = nothing\nvar_smoothing = 1.0e-9","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/#GaussianNBClassifier_NaiveBayes","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"","category":"section"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"GaussianNBClassifier","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"A model type for constructing a Gaussian naive Bayes classifier, based on NaiveBayes.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"GaussianNBClassifier = @load GaussianNBClassifier pkg=NaiveBayes","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"Do model = GaussianNBClassifier() to construct an instance with default hyper-parameters. ","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"Given each class taken on by the target variable y, it is supposed that the conditional probability distribution for the input variables X is a multivariate Gaussian. The mean and covariance of these Gaussian distributions are estimated using maximum likelihood, and a probability distribution for y given X is deduced by applying Bayes' rule. The required marginal for y is estimated using class frequency in the training data.","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"Important. The name \"naive Bayes classifier\" is perhaps misleading. 
Since we are learning the full multivariate Gaussian distributions for X given y, we are not applying the usual naive Bayes independence condition, which would amount to forcing the covariance matrix to be diagonal.","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/#Training-data","page":"GaussianNBClassifier","title":"Training data","text":"","category":"section"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"Here:","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is Finite; check the scitype with schema(y)","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/#Operations","page":"GaussianNBClassifier","title":"Operations","text":"","category":"section"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above. Predictions are probabilistic.\npredict_mode(mach, Xnew): Return the mode of above predictions.","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/#Fitted-parameters","page":"GaussianNBClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"c_counts: A dictionary containing the observed count of each input class.\nc_stats: A dictionary containing observed statistics on each input class. Each class is represented by a DataStats object, with the following fields:\nn_vars: The number of variables used to describe the class's behavior.\nn_obs: The number of times the class is observed.\nobs_axis: The axis along which the observations were computed.\ngaussians: A per class dictionary of Gaussians, each representing the distribution of the class. 
Represented with type Distributions.MvNormal from the Distributions.jl package.\nn_obs: The total number of observations in the training data.","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/#Examples","page":"GaussianNBClassifier","title":"Examples","text":"","category":"section"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"using MLJ\nGaussianNB = @load GaussianNBClassifier pkg=NaiveBayes\n\nX, y = @load_iris\nclf = GaussianNB()\nmach = machine(clf, X, y) |> fit!\n\nfitted_params(mach)\n\npreds = predict(mach, X) ## probabilistic predictions\npreds[1]\npredict_mode(mach, X) ## point predictions","category":"page"},{"location":"models/GaussianNBClassifier_NaiveBayes/","page":"GaussianNBClassifier","title":"GaussianNBClassifier","text":"See also MultinomialNBClassifier","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/#CatBoostRegressor_CatBoost","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"","category":"section"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"CatBoostRegressor","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"A model type for constructing a CatBoost regressor, based on CatBoost.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"CatBoostRegressor = @load CatBoostRegressor pkg=CatBoost","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"Do model = CatBoostRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in CatBoostRegressor(iterations=...).","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/#Training-data","page":"CatBoostRegressor","title":"Training data","text":"","category":"section"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"where","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, Finite, Textual; check column scitypes with schema(X). 
Textual columns will be passed to catboost as text_features, Multiclass columns will be passed to catboost as cat_features, and OrderedFactor columns will be converted to integers.\ny: the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/#Hyper-parameters","page":"CatBoostRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"More details on the catboost hyperparameters, here are the Python docs: https://catboost.ai/en/docs/concepts/python-reference_catboostclassifier#parameters","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/#Operations","page":"CatBoostRegressor","title":"Operations","text":"","category":"section"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"predict(mach, Xnew): probabilistic predictions of the target given new features Xnew having the same scitype as X above.","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/#Accessor-functions","page":"CatBoostRegressor","title":"Accessor functions","text":"","category":"section"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"feature_importances(mach): return vector of feature importances, in the form of feature::Symbol => importance::Real pairs","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/#Fitted-parameters","page":"CatBoostRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"model: The Python CatBoostRegressor model","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/#Report","page":"CatBoostRegressor","title":"Report","text":"","category":"section"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"The fields of report(mach) are:","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"feature_importances: Vector{Pair{Symbol, Float64}} of feature importances","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/#Examples","page":"CatBoostRegressor","title":"Examples","text":"","category":"section"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"using CatBoost.MLJCatBoostInterface\nusing MLJ\n\nX = (\n duration = [1.5, 4.1, 5.0, 6.7], \n n_phone_calls = [4, 5, 6, 7], \n department = coerce([\"acc\", \"ops\", \"acc\", \"ops\"], Multiclass), \n)\ny = [2.0, 4.0, 6.0, 7.0]\n\nmodel = CatBoostRegressor(iterations=5)\nmach = machine(model, X, y)\nfit!(mach)\npreds = predict(mach, X)","category":"page"},{"location":"models/CatBoostRegressor_CatBoost/","page":"CatBoostRegressor","title":"CatBoostRegressor","text":"See also catboost and the unwrapped model type 
CatBoost.CatBoostRegressor.","category":"page"},{"location":"models/RidgeClassifier_MLJScikitLearnInterface/#RidgeClassifier_MLJScikitLearnInterface","page":"RidgeClassifier","title":"RidgeClassifier","text":"","category":"section"},{"location":"models/RidgeClassifier_MLJScikitLearnInterface/","page":"RidgeClassifier","title":"RidgeClassifier","text":"RidgeClassifier","category":"page"},{"location":"models/RidgeClassifier_MLJScikitLearnInterface/","page":"RidgeClassifier","title":"RidgeClassifier","text":"A model type for constructing a ridge regression classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RidgeClassifier_MLJScikitLearnInterface/","page":"RidgeClassifier","title":"RidgeClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RidgeClassifier_MLJScikitLearnInterface/","page":"RidgeClassifier","title":"RidgeClassifier","text":"RidgeClassifier = @load RidgeClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/RidgeClassifier_MLJScikitLearnInterface/","page":"RidgeClassifier","title":"RidgeClassifier","text":"Do model = RidgeClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RidgeClassifier(alpha=...).","category":"page"},{"location":"models/RidgeClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"RidgeClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/RidgeClassifier_MLJScikitLearnInterface/","page":"RidgeClassifier","title":"RidgeClassifier","text":"alpha = 1.0\nfit_intercept = true\ncopy_X = true\nmax_iter = nothing\ntol = 0.001\nclass_weight = nothing\nsolver = auto\nrandom_state = nothing","category":"page"},{"location":"models/LassoRegressor_MLJScikitLearnInterface/#LassoRegressor_MLJScikitLearnInterface","page":"LassoRegressor","title":"LassoRegressor","text":"","category":"section"},{"location":"models/LassoRegressor_MLJScikitLearnInterface/","page":"LassoRegressor","title":"LassoRegressor","text":"LassoRegressor","category":"page"},{"location":"models/LassoRegressor_MLJScikitLearnInterface/","page":"LassoRegressor","title":"LassoRegressor","text":"A model type for constructing a lasso regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LassoRegressor_MLJScikitLearnInterface/","page":"LassoRegressor","title":"LassoRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LassoRegressor_MLJScikitLearnInterface/","page":"LassoRegressor","title":"LassoRegressor","text":"LassoRegressor = @load LassoRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LassoRegressor_MLJScikitLearnInterface/","page":"LassoRegressor","title":"LassoRegressor","text":"Do model = LassoRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in LassoRegressor(alpha=...).","category":"page"},{"location":"models/LassoRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"LassoRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LassoRegressor_MLJScikitLearnInterface/","page":"LassoRegressor","title":"LassoRegressor","text":"alpha = 1.0\nfit_intercept = true\nprecompute = false\ncopy_X = true\nmax_iter = 1000\ntol = 0.0001\nwarm_start = false\npositive = false\nrandom_state = nothing\nselection = cyclic","category":"page"},{"location":"models/KDEDetector_OutlierDetectionPython/#KDEDetector_OutlierDetectionPython","page":"KDEDetector","title":"KDEDetector","text":"","category":"section"},{"location":"models/KDEDetector_OutlierDetectionPython/","page":"KDEDetector","title":"KDEDetector","text":"KDEDetector(bandwidth=1.0,\n algorithm=\"auto\",\n leaf_size=30,\n metric=\"minkowski\",\n metric_params=None)","category":"page"},{"location":"models/KDEDetector_OutlierDetectionPython/","page":"KDEDetector","title":"KDEDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.kde","category":"page"},{"location":"models/ConstantClassifier_MLJModels/#ConstantClassifier_MLJModels","page":"ConstantClassifier","title":"ConstantClassifier","text":"","category":"section"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"ConstantClassifier","category":"page"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"This \"dummy\" probabilistic predictor always returns the same distribution, irrespective of the provided input pattern. The distribution d returned is the UnivariateFinite distribution based on frequency of classes observed in the training target data. So, pdf(d, level) is the number of times the training target takes on the value level. Use predict_mode instead of predict to obtain the training target mode instead. 
For more on the UnivariateFinite type, see the CategoricalDistributions.jl package.","category":"page"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"Almost any reasonable model is expected to outperform ConstantClassifier, which is used almost exclusively for testing and establishing performance baselines.","category":"page"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"In MLJ (or MLJModels) do model = ConstantClassifier() to construct an instance.","category":"page"},{"location":"models/ConstantClassifier_MLJModels/#Training-data","page":"ConstantClassifier","title":"Training data","text":"","category":"section"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"Here:","category":"page"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"X is any table of input features (eg, a DataFrame)\ny is the target, which can be any AbstractVector whose element scitype is Finite; check the scitype with schema(y)","category":"page"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/ConstantClassifier_MLJModels/#Hyper-parameters","page":"ConstantClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"None.","category":"page"},{"location":"models/ConstantClassifier_MLJModels/#Operations","page":"ConstantClassifier","title":"Operations","text":"","category":"section"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"predict(mach, Xnew): Return predictions of the target given features Xnew (which for this model are ignored). 
Predictions are probabilistic.\npredict_mode(mach, Xnew): Return the mode of the probabilistic predictions returned above.","category":"page"},{"location":"models/ConstantClassifier_MLJModels/#Fitted-parameters","page":"ConstantClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"target_distribution: The distribution fit to the supplied target data.","category":"page"},{"location":"models/ConstantClassifier_MLJModels/#Examples","page":"ConstantClassifier","title":"Examples","text":"","category":"section"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"using MLJ\n\nclf = ConstantClassifier()\n\nX, y = @load_crabs ## a table and a categorical vector\nmach = machine(clf, X, y) |> fit!\n\nfitted_params(mach)\n\nXnew = (;FL = [8.1, 24.8, 7.2],\n RW = [5.1, 25.7, 6.4],\n CL = [15.9, 46.7, 14.3],\n CW = [18.7, 59.7, 12.2],\n BD = [6.2, 23.6, 8.4],)\n\n## probabilistic predictions:\nyhat = predict(mach, Xnew)\nyhat[1]\n\n## raw probabilities:\npdf.(yhat, \"B\")\n\n## probability matrix:\nL = levels(y)\npdf(yhat, L)\n\n## point predictions:\npredict_mode(mach, Xnew)","category":"page"},{"location":"models/ConstantClassifier_MLJModels/","page":"ConstantClassifier","title":"ConstantClassifier","text":"See also ConstantRegressor","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/#ClusterUndersampler_Imbalance","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"","category":"section"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"Initiate a cluster undersampling model with the given hyper-parameters.","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"ClusterUndersampler","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"A model type for constructing a cluster undersampler, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"ClusterUndersampler = @load ClusterUndersampler pkg=Imbalance","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"Do model = ClusterUndersampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ClusterUndersampler(mode=...).","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"ClusterUndersampler implements clustering undersampling as presented in Wei-Chao, L., Chih-Fong, T., Ya-Han, H., & Jing-Shang, J. (2017). Clustering-based undersampling in class-imbalanced data. Information Sciences, 409–410, 17–26. 
with K-means as the clustering algorithm.","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/#Training-data","page":"ClusterUndersampler","title":"Training data","text":"","category":"section"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"Likewise, there is no need to fit!(mach). ","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"For default values of the hyper-parameters, model can be constructed with model = ClusterUndersampler().","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/#Hyperparameters","page":"ClusterUndersampler","title":"Hyperparameters","text":"","category":"section"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"mode::AbstractString=\"nearest\": If \"center\" then the undersampled data will consist of the centroids of","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"each cluster found; if `\"nearest\"` then it will consist of the nearest neighbor of each centroid.","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"ratios=1.0: A parameter that controls the amount of undersampling to be done for each class\nCan be a float and in this case each class will be undersampled to the size of the minority class times the float. By default, all classes are undersampled to the size of the minority class\nCan be a dictionary mapping each class label to the float ratio for that class\nmaxiter::Integer=100: Maximum number of iterations to run K-means\nrng::Integer=42: Random number generator seed. 
Must be an integer.","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/#Transform-Inputs","page":"ClusterUndersampler","title":"Transform Inputs","text":"","category":"section"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"X: A matrix or table of floats where each row is an observation from the dataset\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/#Transform-Outputs","page":"ClusterUndersampler","title":"Transform Outputs","text":"","category":"section"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"X_under: A matrix or table that includes the data after undersampling depending on whether the input X is a matrix or table respectively\ny_under: An abstract vector of labels corresponding to X_under","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/#Operations","page":"ClusterUndersampler","title":"Operations","text":"","category":"section"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"transform(mach, X, y): resample the data X and y using ClusterUndersampler, returning the undersampled versions","category":"page"},{"location":"models/ClusterUndersampler_Imbalance/#Example","page":"ClusterUndersampler","title":"Example","text":"","category":"section"},{"location":"models/ClusterUndersampler_Imbalance/","page":"ClusterUndersampler","title":"ClusterUndersampler","text":"using MLJ\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows, num_continuous_feats = 100, 5\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n class_probs, rng=42) \n \njulia> Imbalance.checkbalance(y; ref=\"minority\")\n 1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) \n 2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (173.7%) \n 0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (252.6%) \n\n## load cluster_undersampling\nClusterUndersampler = @load ClusterUndersampler pkg=Imbalance\n\n## wrap the model in a machine\nundersampler = ClusterUndersampler(mode=\"nearest\", \n ratios=Dict(0=>1.0, 1=> 1.0, 2=>1.0), rng=42)\nmach = machine(undersampler)\n\n## provide the data to transform (there is nothing to fit)\nX_under, y_under = transform(mach, X, y)\n\n \njulia> Imbalance.checkbalance(y_under; ref=\"minority\")\n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%)","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/#MultitargetRidgeRegressor_MultivariateStats","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"","category":"section"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"MultitargetRidgeRegressor","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"A model type for constructing a multitarget ridge regressor, based on MultivariateStats.jl, and implementing the MLJ model 
interface.","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"MultitargetRidgeRegressor = @load MultitargetRidgeRegressor pkg=MultivariateStats","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"Do model = MultitargetRidgeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetRidgeRegressor(lambda=...).","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"Multi-target ridge regression adds a quadratic penalty term to multi-target least squares regression, for regularization. Ridge regression is particularly useful in the case of multicollinearity. In this case, the output represents a response vector. Options exist to specify a bias term, and to adjust the strength of the penalty term.","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/#Training-data","page":"MultitargetRidgeRegressor","title":"Training data","text":"","category":"section"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"Here:","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any table of responses whose element scitype is Continuous; check the scitype with scitype(y).","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/#Hyper-parameters","page":"MultitargetRidgeRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"lambda=1.0: Is the non-negative parameter for the regularization strength. 
If lambda is 0, ridge regression is equivalent to linear least squares regression, and as lambda approaches infinity, all the linear coefficients approach 0.\nbias=true: Include the bias term if true, otherwise fit without bias term.","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/#Operations","page":"MultitargetRidgeRegressor","title":"Operations","text":"","category":"section"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"predict(mach, Xnew): Return predictions of the target given new features Xnew, which should have the same scitype as X above.","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/#Fitted-parameters","page":"MultitargetRidgeRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"coefficients: The linear coefficients determined by the model.\nintercept: The intercept determined by the model.","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/#Examples","page":"MultitargetRidgeRegressor","title":"Examples","text":"","category":"section"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"using MLJ\nusing DataFrames\n\nRidgeRegressor = @load MultitargetRidgeRegressor pkg=MultivariateStats\n\nX, y = make_regression(100, 6; n_targets = 2) ## a table and a table (synthetic data)\n\nridge_regressor = RidgeRegressor(lambda=1.5)\nmach = machine(ridge_regressor, X, y) |> fit!\n\nXnew, _ = make_regression(3, 6)\nyhat = predict(mach, Xnew) ## new predictions","category":"page"},{"location":"models/MultitargetRidgeRegressor_MultivariateStats/","page":"MultitargetRidgeRegressor","title":"MultitargetRidgeRegressor","text":"See also LinearRegressor, MultitargetLinearRegressor, RidgeRegressor","category":"page"},{"location":"frequently_asked_questions/#Frequently-Asked-Questions","page":"FAQ","title":"Frequently Asked Questions","text":"","category":"section"},{"location":"frequently_asked_questions/#Julia-already-has-a-great-machine-learning-toolbox,-ScitkitLearn.jl.-Why-MLJ?","page":"FAQ","title":"Julia already has a great machine learning toolbox, ScikitLearn.jl. Why MLJ?","text":"","category":"section"},{"location":"frequently_asked_questions/","page":"FAQ","title":"FAQ","text":"An alternative machine learning toolbox for Julia users is ScikitLearn.jl. Initially intended as a Julia wrapper for the popular Python library scikit-learn, ScikitLearn.jl also allows ML algorithms written in Julia to implement its API. Meta-algorithms (systematic tuning, pipelining, etc.) remain wrapped Python code, however.","category":"page"},{"location":"frequently_asked_questions/","page":"FAQ","title":"FAQ","text":"While ScikitLearn.jl provides the Julia user with access to a mature and large library of machine learning models, the scikit-learn API on which it is modeled, dating back to 2007, is not likely to evolve significantly in the future. 
MLJ enjoys (or will enjoy) several features that should make it an attractive alternative in the longer term:","category":"page"},{"location":"frequently_asked_questions/","page":"FAQ","title":"FAQ","text":"One language. ScikitLearn.jl wraps Python code, which in turn wraps C code for performance-critical routines. A Julia machine learning algorithm that implements the MLJ model interface is 100% Julia. Writing code in Julia is almost as fast as Python and well-written Julia code runs almost as fast as C. Additionally, a single language design provides superior interoperability. For example, one can implement: (i) gradient-descent tuning of hyperparameters, using automatic differentiation libraries such as Flux.jl; and (ii) GPU performance boosts without major code refactoring, using CuArrays.jl.\nRegistry for model metadata. In ScikitLearn.jl the list of available models, as well as model metadata (whether a model handles categorical inputs, whether it can make probabilistic predictions, etc) must be gleaned from the documentation. In MLJ, this information is more structured and is accessible to MLJ via a searchable model registry (without the models needing to be loaded).\nFlexible API for model composition. Pipelines in scikit-learn are more of an afterthought than an integral part of the original design. By contrast, MLJ's user-interaction API was predicated on the requirements of a flexible \"learning network\" API, one that allows models to be connected in essentially arbitrary ways (such as Wolpert model stacks). Networks can be built and tested in stages before being exported as first-class stand-alone models. Networks feature \"smart\" training (only necessary components are retrained after parameter changes) and will eventually be trainable using a DAG scheduler.\nClean probabilistic API. The scikit-learn API does not specify a universal standard for the form of probabilistic predictions. By fixing a probabilistic API along the lines of the skpro project, MLJ aims to improve support for Bayesian statistics and probabilistic graphical models.\nUniversal adoption of categorical data types. Python's scientific array library NumPy has no dedicated data type for representing categorical data (i.e., no type that tracks the pool of all possible classes). Generally, scikit-learn models deal with this by requiring data to be relabeled as integers. However, the naive user trains a model on relabeled categorical data only to discover that evaluation on a test set crashes their code because a categorical feature takes on a value not observed in training. MLJ mitigates such issues by insisting on the use of categorical data types, and by insisting that MLJ model implementations preserve the class pools. 
If, for example, a training target contains classes in the pool that do not appear in the training set, a probabilistic prediction will nevertheless predict a distribution whose support includes the missing class, but which is appropriately weighted with probability zero.","category":"page"},{"location":"frequently_asked_questions/","page":"FAQ","title":"FAQ","text":"Finally, we note that a large number of ScikitLearn.jl models are now wrapped for use in MLJ.","category":"page"},{"location":"models/AffinityPropagation_MLJScikitLearnInterface/#AffinityPropagation_MLJScikitLearnInterface","page":"AffinityPropagation","title":"AffinityPropagation","text":"","category":"section"},{"location":"models/AffinityPropagation_MLJScikitLearnInterface/","page":"AffinityPropagation","title":"AffinityPropagation","text":"AffinityPropagation","category":"page"},{"location":"models/AffinityPropagation_MLJScikitLearnInterface/","page":"AffinityPropagation","title":"AffinityPropagation","text":"A model type for constructing a Affinity Propagation Clustering of data, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/AffinityPropagation_MLJScikitLearnInterface/","page":"AffinityPropagation","title":"AffinityPropagation","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/AffinityPropagation_MLJScikitLearnInterface/","page":"AffinityPropagation","title":"AffinityPropagation","text":"AffinityPropagation = @load AffinityPropagation pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/AffinityPropagation_MLJScikitLearnInterface/","page":"AffinityPropagation","title":"AffinityPropagation","text":"Do model = AffinityPropagation() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in AffinityPropagation(damping=...).","category":"page"},{"location":"models/AffinityPropagation_MLJScikitLearnInterface/#Hyper-parameters","page":"AffinityPropagation","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/AffinityPropagation_MLJScikitLearnInterface/","page":"AffinityPropagation","title":"AffinityPropagation","text":"damping = 0.5\nmax_iter = 200\nconvergence_iter = 15\ncopy = true\npreference = nothing\naffinity = euclidean\nverbose = false","category":"page"},{"location":"more_on_probabilistic_predictors/#More-on-Probabilistic-Predictors","page":"More on Probabilistic Predictors","title":"More on Probabilistic Predictors","text":"","category":"section"},{"location":"more_on_probabilistic_predictors/","page":"More on Probabilistic Predictors","title":"More on Probabilistic Predictors","text":"Although one can call predict_mode on a probabilistic binary classifier to get deterministic predictions, a more flexible strategy is to wrap the model using BinaryThresholdPredictor, as this allows the user to specify the threshold probability for predicting a positive class. 
This wrapping converts a probabilistic classifier into a deterministic one.","category":"page"},{"location":"more_on_probabilistic_predictors/","page":"More on Probabilistic Predictors","title":"More on Probabilistic Predictors","text":"The positive class is always the second class returned when calling levels on the training target y.","category":"page"},{"location":"more_on_probabilistic_predictors/","page":"More on Probabilistic Predictors","title":"More on Probabilistic Predictors","text":"MLJModels.BinaryThresholdPredictor","category":"page"},{"location":"more_on_probabilistic_predictors/#MLJModels.BinaryThresholdPredictor","page":"More on Probabilistic Predictors","title":"MLJModels.BinaryThresholdPredictor","text":"BinaryThresholdPredictor(model; threshold=0.5)\n\nWrap the Probabilistic model, model, assumed to support binary classification, as a Deterministic model, by applying the specified threshold to the positive class probability. In addition to conventional supervised classifiers, it can also be applied to outlier detection models that predict normalized scores - in the form of appropriate UnivariateFinite distributions - that is, models that subtype AbstractProbabilisticUnsupervisedDetector or AbstractProbabilisticSupervisedDetector.\n\nBy convention the positive class is the second class returned by levels(y), where y is the target.\n\nIf threshold=0.5 then calling predict on the wrapped model is equivalent to calling predict_mode on the atomic model.\n\nExample\n\nBelow is an application to the well-known Pima Indian diabetes dataset, including optimization of the threshold parameter, with a high balanced accuracy the objective. The target class distribution is 500 positives to 268 negatives.\n\nLoading the data:\n\nusing MLJ, Random\nrng = Xoshiro(123)\n\ndiabetes = OpenML.load(43582)\noutcome, X = unpack(diabetes, ==(:Outcome), rng=rng);\ny = coerce(Int.(outcome), OrderedFactor);\n\nChoosing a probabilistic classifier:\n\nEvoTreesClassifier = @load EvoTreesClassifier\nprob_predictor = EvoTreesClassifier()\n\nWrapping in TunedModel to get a deterministic classifier with threshold as a new hyperparameter:\n\npoint_predictor = BinaryThresholdPredictor(prob_predictor, threshold=0.6)\nXnew, _ = make_moons(3, rng=rng)\nmach = machine(point_predictor, X, y) |> fit!\npredict(mach, X)[1:3] # [0, 0, 0]\n\nEstimating performance:\n\nbalanced = BalancedAccuracy(adjusted=true)\ne = evaluate!(mach, resampling=CV(nfolds=6), measures=[balanced, accuracy])\ne.measurement[1] # 0.405 ± 0.089\n\nWrapping in tuning strategy to learn threshold that maximizes balanced accuracy:\n\nr = range(point_predictor, :threshold, lower=0.1, upper=0.9)\ntuned_point_predictor = TunedModel(\n point_predictor,\n tuning=RandomSearch(rng=rng),\n resampling=CV(nfolds=6),\n range = r,\n measure=balanced,\n n=30,\n)\nmach2 = machine(tuned_point_predictor, X, y) |> fit!\noptimized_point_predictor = report(mach2).best_model\noptimized_point_predictor.threshold # 0.260\npredict(mach2, X)[1:3] # [1, 1, 0]\n\nEstimating the performance of the auto-thresholding model (nested resampling here):\n\ne = evaluate!(mach2, resampling=CV(nfolds=6), measure=[balanced, accuracy])\ne.measurement[1] # 0.477 ± 
0.110\n\n\n\n\n\n","category":"type"},{"location":"models/LogisticCVClassifier_MLJScikitLearnInterface/#LogisticCVClassifier_MLJScikitLearnInterface","page":"LogisticCVClassifier","title":"LogisticCVClassifier","text":"","category":"section"},{"location":"models/LogisticCVClassifier_MLJScikitLearnInterface/","page":"LogisticCVClassifier","title":"LogisticCVClassifier","text":"LogisticCVClassifier","category":"page"},{"location":"models/LogisticCVClassifier_MLJScikitLearnInterface/","page":"LogisticCVClassifier","title":"LogisticCVClassifier","text":"A model type for constructing a logistic regression classifier with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LogisticCVClassifier_MLJScikitLearnInterface/","page":"LogisticCVClassifier","title":"LogisticCVClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LogisticCVClassifier_MLJScikitLearnInterface/","page":"LogisticCVClassifier","title":"LogisticCVClassifier","text":"LogisticCVClassifier = @load LogisticCVClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LogisticCVClassifier_MLJScikitLearnInterface/","page":"LogisticCVClassifier","title":"LogisticCVClassifier","text":"Do model = LogisticCVClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LogisticCVClassifier(Cs=...).","category":"page"},{"location":"models/LogisticCVClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"LogisticCVClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LogisticCVClassifier_MLJScikitLearnInterface/","page":"LogisticCVClassifier","title":"LogisticCVClassifier","text":"Cs = 10\nfit_intercept = true\ncv = 5\ndual = false\npenalty = l2\nscoring = nothing\nsolver = lbfgs\ntol = 0.0001\nmax_iter = 100\nclass_weight = nothing\nn_jobs = nothing\nverbose = 0\nrefit = true\nintercept_scaling = 1.0\nmulti_class = auto\nrandom_state = nothing\nl1_ratios = nothing","category":"page"},{"location":"models/ROSE_Imbalance/#ROSE_Imbalance","page":"ROSE","title":"ROSE","text":"","category":"section"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"Initiate a ROSE model with the given hyper-parameters.","category":"page"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"ROSE","category":"page"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"A model type for constructing a rose, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"ROSE = @load ROSE pkg=Imbalance","category":"page"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"Do model = ROSE() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ROSE(s=...).","category":"page"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"ROSE implements the ROSE (Random Oversampling Examples) algorithm to correct for class imbalance as in G Menardi, N. 
Torelli, “Training and assessing classification rules with imbalanced data,” Data Mining and Knowledge Discovery, 28(1), pp.92-122, 2014.","category":"page"},{"location":"models/ROSE_Imbalance/#Training-data","page":"ROSE","title":"Training data","text":"","category":"section"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)","category":"page"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"Likewise, there is no need to fit!(mach). ","category":"page"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"For default values of the hyper-parameters, model can be constructed by model = ROSE()","category":"page"},{"location":"models/ROSE_Imbalance/#Hyperparameters","page":"ROSE","title":"Hyperparameters","text":"","category":"section"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"s::float: A parameter that proportionally controls the bandwidth of the Gaussian kernel\nratios=1.0: A parameter that controls the amount of oversampling to be done for each class\nCan be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class\nCan be a dictionary mapping each class label to the float ratio for that class\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.","category":"page"},{"location":"models/ROSE_Imbalance/#Transform-Inputs","page":"ROSE","title":"Transform Inputs","text":"","category":"section"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"X: A matrix or table of floats where each row is an observation from the dataset\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/ROSE_Imbalance/#Transform-Outputs","page":"ROSE","title":"Transform Outputs","text":"","category":"section"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"Xover: A matrix or table that includes original data and the new observations due to oversampling, 
depending on whether the input X is a matrix or table respectively\nyover: An abstract vector of labels corresponding to Xover","category":"page"},{"location":"models/ROSE_Imbalance/#Operations","page":"ROSE","title":"Operations","text":"","category":"section"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"transform(mach, X, y): resample the data X and y using ROSE, returning both the new and original observations","category":"page"},{"location":"models/ROSE_Imbalance/#Example","page":"ROSE","title":"Example","text":"","category":"section"},{"location":"models/ROSE_Imbalance/","page":"ROSE","title":"ROSE","text":"using MLJ\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows, num_continuous_feats = 100, 5\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n class_probs, rng=42) \n\njulia> Imbalance.checkbalance(y)\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) \n\n## load ROSE\nROSE = @load ROSE pkg=Imbalance\n\n## wrap the model in a machine\noversampler = ROSE(s=0.3, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)\nmach = machine(oversampler)\n\n## provide the data to transform (there is nothing to fit)\nXover, yover = transform(mach, X, y)\n\njulia> Imbalance.checkbalance(yover)\n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) ","category":"page"},{"location":"simple_user_defined_models/#Simple-User-Defined-Models","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"","category":"section"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"To quickly implement a new supervised model in MLJ, it suffices to:","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"Define a mutable struct to store hyperparameters. This is either a subtype of Probabilistic or Deterministic, depending on whether probabilistic or ordinary point predictions are intended. This struct is the model.\nDefine a fit method, dispatched on the model, returning learned parameters, also known as the fitresult.\nDefine a predict method, dispatched on the model, and the fitresult, to return predictions on new patterns.","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"In the examples below, the training input X of fit, and the new input Xnew passed to predict, are tables. Each training target y is an AbstractVector.","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"The predictions returned by predict have the same form as y for deterministic models, but are Vectors of distributions for probabilistic models.","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"Advanced model functionality not addressed here includes: (i) optional update method to avoid redundant calculations when calling fit! 
on machines a second time; (ii) reporting extra training-related statistics; (iii) exposing model-specific functionality; (iv) checking the scientific type of data passed to your model in machine construction; and (iv) checking the validity of hyperparameter values. All this is described in Adding Models for General Use.","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"For an unsupervised model, implement transform and, optionally, inverse_transform using the same signature at predict below.","category":"page"},{"location":"simple_user_defined_models/#A-simple-deterministic-regressor","page":"Simple User Defined Models","title":"A simple deterministic regressor","text":"","category":"section"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"Here's a quick-and-dirty implementation of a ridge regressor with no intercept:","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"import MLJBase\nusing LinearAlgebra\n\nmutable struct MyRegressor <: MLJBase.Deterministic\n lambda::Float64\nend\nMyRegressor(; lambda=0.1) = MyRegressor(lambda)\n\n# fit returns coefficients minimizing a penalized rms loss function:\nfunction MLJBase.fit(model::MyRegressor, verbosity, X, y)\n x = MLJBase.matrix(X) # convert table to matrix\n fitresult = (x'x + model.lambda*I)\\(x'y) # the coefficients\n cache=nothing\n report=nothing\n return fitresult, cache, report\nend\n\n# predict uses coefficients to make a new prediction:\nMLJBase.predict(::MyRegressor, fitresult, Xnew) = MLJBase.matrix(Xnew) * fitresult","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"using MLJ\nimport MLJBase\nusing LinearAlgebra\nMLJBase.color_off()\nmutable struct MyRegressor <: MLJBase.Deterministic\n lambda::Float64\nend\nMyRegressor(; lambda=0.1) = MyRegressor(lambda)\nfunction MLJBase.fit(model::MyRegressor, verbosity, X, y)\n x = MLJBase.matrix(X)\n fitresult = (x'x + model.lambda*I)\\(x'y)\n cache=nothing\n report=nothing\n return fitresult, cache, report\nend\nMLJBase.predict(::MyRegressor, fitresult, Xnew) = MLJBase.matrix(Xnew) * fitresult","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"After loading this code, all MLJ's basic meta-algorithms can be applied to MyRegressor:","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"X, y = @load_boston;\nmodel = MyRegressor(lambda=1.0)\nregressor = machine(model, X, y)\nevaluate!(regressor, resampling=CV(), measure=rms, verbosity=0)\n","category":"page"},{"location":"simple_user_defined_models/#A-simple-probabilistic-classifier","page":"Simple User Defined Models","title":"A simple probabilistic classifier","text":"","category":"section"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"The following probabilistic model simply fits a probability distribution to the MultiClass training target (i.e., ignores X) and returns this pdf for any new pattern:","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined 
Models","text":"import MLJBase\nimport Distributions\n\nstruct MyClassifier <: MLJBase.Probabilistic\nend\n\n# `fit` ignores the inputs X and returns the training target y\n# probability distribution:\nfunction MLJBase.fit(model::MyClassifier, verbosity, X, y)\n fitresult = Distributions.fit(MLJBase.UnivariateFinite, y)\n cache = nothing\n report = nothing\n return fitresult, cache, report\nend\n\n# `predict` returns the passed fitresult (pdf) for all new patterns:\nMLJBase.predict(model::MyClassifier, fitresult, Xnew) =\n [fitresult for r in 1:nrows(Xnew)]","category":"page"},{"location":"simple_user_defined_models/","page":"Simple User Defined Models","title":"Simple User Defined Models","text":"julia> X, y = @load_iris\njulia> mach = fit!(machine(MyClassifier(), X, y))\njulia> predict(mach, selectrows(X, 1:2))\n2-element Array{UnivariateFinite{String,UInt32,Float64},1}:\n UnivariateFinite(setosa=>0.333, versicolor=>0.333, virginica=>0.333)\n UnivariateFinite(setosa=>0.333, versicolor=>0.333, virginica=>0.333)","category":"page"},{"location":"models/BayesianRidgeRegressor_MLJScikitLearnInterface/#BayesianRidgeRegressor_MLJScikitLearnInterface","page":"BayesianRidgeRegressor","title":"BayesianRidgeRegressor","text":"","category":"section"},{"location":"models/BayesianRidgeRegressor_MLJScikitLearnInterface/","page":"BayesianRidgeRegressor","title":"BayesianRidgeRegressor","text":"BayesianRidgeRegressor","category":"page"},{"location":"models/BayesianRidgeRegressor_MLJScikitLearnInterface/","page":"BayesianRidgeRegressor","title":"BayesianRidgeRegressor","text":"A model type for constructing a Bayesian ridge regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BayesianRidgeRegressor_MLJScikitLearnInterface/","page":"BayesianRidgeRegressor","title":"BayesianRidgeRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BayesianRidgeRegressor_MLJScikitLearnInterface/","page":"BayesianRidgeRegressor","title":"BayesianRidgeRegressor","text":"BayesianRidgeRegressor = @load BayesianRidgeRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/BayesianRidgeRegressor_MLJScikitLearnInterface/","page":"BayesianRidgeRegressor","title":"BayesianRidgeRegressor","text":"Do model = BayesianRidgeRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in BayesianRidgeRegressor(n_iter=...).","category":"page"},{"location":"models/BayesianRidgeRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"BayesianRidgeRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/BayesianRidgeRegressor_MLJScikitLearnInterface/","page":"BayesianRidgeRegressor","title":"BayesianRidgeRegressor","text":"n_iter = 300\ntol = 0.001\nalpha_1 = 1.0e-6\nalpha_2 = 1.0e-6\nlambda_1 = 1.0e-6\nlambda_2 = 1.0e-6\ncompute_score = false\nfit_intercept = true\ncopy_X = true\nverbose = false","category":"page"},{"location":"models/RidgeCVClassifier_MLJScikitLearnInterface/#RidgeCVClassifier_MLJScikitLearnInterface","page":"RidgeCVClassifier","title":"RidgeCVClassifier","text":"","category":"section"},{"location":"models/RidgeCVClassifier_MLJScikitLearnInterface/","page":"RidgeCVClassifier","title":"RidgeCVClassifier","text":"RidgeCVClassifier","category":"page"},{"location":"models/RidgeCVClassifier_MLJScikitLearnInterface/","page":"RidgeCVClassifier","title":"RidgeCVClassifier","text":"A model type for constructing a ridge regression classifier with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RidgeCVClassifier_MLJScikitLearnInterface/","page":"RidgeCVClassifier","title":"RidgeCVClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RidgeCVClassifier_MLJScikitLearnInterface/","page":"RidgeCVClassifier","title":"RidgeCVClassifier","text":"RidgeCVClassifier = @load RidgeCVClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/RidgeCVClassifier_MLJScikitLearnInterface/","page":"RidgeCVClassifier","title":"RidgeCVClassifier","text":"Do model = RidgeCVClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RidgeCVClassifier(alphas=...).","category":"page"},{"location":"models/RidgeCVClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"RidgeCVClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/RidgeCVClassifier_MLJScikitLearnInterface/","page":"RidgeCVClassifier","title":"RidgeCVClassifier","text":"alphas = [0.1, 1.0, 10.0]\nfit_intercept = true\nscoring = nothing\ncv = 5\nclass_weight = nothing\nstore_cv_values = false","category":"page"},{"location":"models/ICA_MultivariateStats/#ICA_MultivariateStats","page":"ICA","title":"ICA","text":"","category":"section"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"ICA","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"A model type for constructing a independent component analysis model, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"ICA = @load ICA pkg=MultivariateStats","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"Do model = ICA() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in ICA(outdim=...).","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"Independent component analysis is a computational technique for separating a multivariate signal into additive subcomponents, with the assumption that the subcomponents are non-Gaussian and independent from each other.","category":"page"},{"location":"models/ICA_MultivariateStats/#Training-data","page":"ICA","title":"Training data","text":"","category":"section"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"mach = machine(model, X)","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"Here:","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/ICA_MultivariateStats/#Hyper-parameters","page":"ICA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"outdim::Int=0: The number of independent components to recover, set automatically if 0.\nalg::Symbol=:fastica: The algorithm to use (only :fastica is supported at the moment).\nfun::Symbol=:tanh: The approximate neg-entropy function, one of :tanh, :gaus.\ndo_whiten::Bool=true: Whether or not to perform pre-whitening.\nmaxiter::Int=100: The maximum number of iterations.\ntol::Real=1e-6: The convergence tolerance for change in the unmixing matrix W.\nmean::Union{Nothing, Real, Vector{Float64}}=nothing: mean to use, if nothing (default) centering is computed and applied, if zero, no centering; otherwise a vector of means can be passed.\nwinit::Union{Nothing,Matrix{<:Real}}=nothing: Initial guess for the unmixing matrix W: either an empty matrix (for random initialization of W), a matrix of size m × k (if do_whiten is true), or a matrix of size m × k. 
Here m is the number of components (columns) of the input.","category":"page"},{"location":"models/ICA_MultivariateStats/#Operations","page":"ICA","title":"Operations","text":"","category":"section"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"transform(mach, Xnew): Return the component-separated version of input Xnew, which should have the same scitype as X above.","category":"page"},{"location":"models/ICA_MultivariateStats/#Fitted-parameters","page":"ICA","title":"Fitted parameters","text":"","category":"section"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"projection: The estimated component matrix.\nmean: The estimated mean vector.","category":"page"},{"location":"models/ICA_MultivariateStats/#Report","page":"ICA","title":"Report","text":"","category":"section"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"The fields of report(mach) are:","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"indim: Dimension (number of columns) of the training data and new data to be transformed.\noutdim: Dimension of transformed data.\nmean: The mean of the untransformed training data, of length indim.","category":"page"},{"location":"models/ICA_MultivariateStats/#Examples","page":"ICA","title":"Examples","text":"","category":"section"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"using MLJ\n\nICA = @load ICA pkg=MultivariateStats\n\ntimes = range(0, 8, length=2000)\n\nsine_wave = sin.(2*times)\nsquare_wave = sign.(sin.(3*times))\nsawtooth_wave = map(t -> mod(2t, 2) - 1, times)\nsignals = hcat(sine_wave, square_wave, sawtooth_wave)\nnoisy_signals = signals + 0.2*randn(size(signals))\n\nmixing_matrix = [ 1 1 1; 0.5 2 1; 1.5 1 2]\nX = MLJ.table(noisy_signals*mixing_matrix)\n\nmodel = ICA(outdim = 3, tol=0.1)\nmach = machine(model, X) |> fit!\n\nX_unmixed = transform(mach, X)\n\nusing Plots\n\nplot(X.x1)\nplot(X.x2)\nplot(X.x3)\n\nplot(X_unmixed.x1)\nplot(X_unmixed.x2)\nplot(X_unmixed.x3)\n","category":"page"},{"location":"models/ICA_MultivariateStats/","page":"ICA","title":"ICA","text":"See also PCA, KernelPCA, FactorAnalysis, PPCA","category":"page"},{"location":"models/LarsCVRegressor_MLJScikitLearnInterface/#LarsCVRegressor_MLJScikitLearnInterface","page":"LarsCVRegressor","title":"LarsCVRegressor","text":"","category":"section"},{"location":"models/LarsCVRegressor_MLJScikitLearnInterface/","page":"LarsCVRegressor","title":"LarsCVRegressor","text":"LarsCVRegressor","category":"page"},{"location":"models/LarsCVRegressor_MLJScikitLearnInterface/","page":"LarsCVRegressor","title":"LarsCVRegressor","text":"A model type for constructing a least angle regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LarsCVRegressor_MLJScikitLearnInterface/","page":"LarsCVRegressor","title":"LarsCVRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LarsCVRegressor_MLJScikitLearnInterface/","page":"LarsCVRegressor","title":"LarsCVRegressor","text":"LarsCVRegressor = @load LarsCVRegressor 
pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LarsCVRegressor_MLJScikitLearnInterface/","page":"LarsCVRegressor","title":"LarsCVRegressor","text":"Do model = LarsCVRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LarsCVRegressor(fit_intercept=...).","category":"page"},{"location":"models/LarsCVRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"LarsCVRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LarsCVRegressor_MLJScikitLearnInterface/","page":"LarsCVRegressor","title":"LarsCVRegressor","text":"fit_intercept = true\nverbose = false\nmax_iter = 500\nnormalize = false\nprecompute = auto\ncv = 5\nmax_n_alphas = 1000\nn_jobs = nothing\neps = 2.220446049250313e-16\ncopy_X = true","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/#LogisticClassifier_MLJLinearModels","page":"LogisticClassifier","title":"LogisticClassifier","text":"","category":"section"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"LogisticClassifier","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"A model type for constructing a logistic classifier, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"LogisticClassifier = @load LogisticClassifier pkg=MLJLinearModels","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"Do model = LogisticClassifier() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"This model is more commonly known as \"logistic regression\". It is a standard classifier for both binary and multiclass classification. The objective function applies either a logistic loss (binary target) or multinomial (softmax) loss, and has a mixed L1/L2 penalty:","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"$","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"L(y, Xθ) + n⋅λ|θ|₂²/2 + n⋅γ|θ|₁ $","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":".","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"Here L is either MLJLinearModels.LogisticLoss or MLJLinearModels.MultiClassLoss, λ and γ indicate the strength of the L2 (resp. 
L1) regularization components and n is the number of training observations.","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"With scale_penalty_with_samples = false the objective function is instead","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"$","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"L(y, Xθ) + λ|θ|₂²/2 + γ|θ|₁ $","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":".","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/#Training-data","page":"LogisticClassifier","title":"Training data","text":"","category":"section"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"where:","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/#Hyperparameters","page":"LogisticClassifier","title":"Hyperparameters","text":"","category":"section"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"lambda::Real: strength of the regularizer if penalty is :l2 or :l1 and strength of the L2 regularizer if penalty is :en. Default: eps()\ngamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0\npenalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2\nfit_intercept::Bool: whether to fit the intercept or not. Default: true\npenalize_intercept::Bool: whether to penalize the intercept. Default: false\nscale_penalty_with_samples::Bool: whether to scale the penalty with the number of samples. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, Newton, NewtonCG, ProxGrad; but subject to the following restrictions:\nIf penalty = :l2, ProxGrad is disallowed. Otherwise, ProxGrad is the only option.\nUnless scitype(y) <: Finite{2} (binary target) Newton is disallowed.\nIf solver = nothing (default) then ProxGrad(accel=true) (FISTA) is used, unless gamma = 0, in which case LBFGS() is used.\nSolver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) 
Default: nothing","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/#Example","page":"LogisticClassifier","title":"Example","text":"","category":"section"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"using MLJ\nX, y = make_blobs(centers = 2)\nmach = fit!(machine(LogisticClassifier(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"},{"location":"models/LogisticClassifier_MLJLinearModels/","page":"LogisticClassifier","title":"LogisticClassifier","text":"See also MultinomialClassifier.","category":"page"},{"location":"models/BaggingRegressor_MLJScikitLearnInterface/#BaggingRegressor_MLJScikitLearnInterface","page":"BaggingRegressor","title":"BaggingRegressor","text":"","category":"section"},{"location":"models/BaggingRegressor_MLJScikitLearnInterface/","page":"BaggingRegressor","title":"BaggingRegressor","text":"BaggingRegressor","category":"page"},{"location":"models/BaggingRegressor_MLJScikitLearnInterface/","page":"BaggingRegressor","title":"BaggingRegressor","text":"A model type for constructing a bagging ensemble regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BaggingRegressor_MLJScikitLearnInterface/","page":"BaggingRegressor","title":"BaggingRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BaggingRegressor_MLJScikitLearnInterface/","page":"BaggingRegressor","title":"BaggingRegressor","text":"BaggingRegressor = @load BaggingRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/BaggingRegressor_MLJScikitLearnInterface/","page":"BaggingRegressor","title":"BaggingRegressor","text":"Do model = BaggingRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BaggingRegressor(estimator=...).","category":"page"},{"location":"models/BaggingRegressor_MLJScikitLearnInterface/","page":"BaggingRegressor","title":"BaggingRegressor","text":"A Bagging regressor is an ensemble meta-estimator that fits base regressors each on random subsets of the original dataset and then aggregate their individual predictions (either by voting or by averaging) to form a final prediction. 
Such a meta-estimator can typically be used as a way to reduce the variance of a black-box estimator (e.g., a decision tree), by introducing randomization into its construction procedure and then making an ensemble out of it.","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/#KNNClassifier_NearestNeighborModels","page":"KNNClassifier","title":"KNNClassifier","text":"","category":"section"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"KNNClassifier","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"A model type for constructing a K-nearest neighbor classifier, based on NearestNeighborModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"KNNClassifier = @load KNNClassifier pkg=NearestNeighborModels","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"Do model = KNNClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KNNClassifier(K=...).","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"KNNClassifier implements the K-nearest neighbors classifier, a non-parametric algorithm that predicts a discrete class distribution associated with a new point by taking a vote over the classes of the k-nearest points. Each neighbor vote is assigned a weight based on the proximity of the neighbor point to the test point according to a specified distance metric.","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"For more information about the weighting kernels, see the paper by Geler et al., Comparison of different weighting schemes for the kNN classifier on time-series data. 
","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/#Training-data","page":"KNNClassifier","title":"Training data","text":"","category":"section"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"OR","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"mach = machine(model, X, y, w)","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"Here:","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any AbstractVector whose element scitype is <:Finite (<:Multiclass or <:OrderedFactor will do); check the scitype with scitype(y)\nw is the observation weights which can either be nothing (default) or an AbstractVector whose element scitype is Count or Continuous. This is different from weights kernel which is a model hyperparameter, see below.","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/#Hyper-parameters","page":"KNNClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"K::Int=5 : number of neighbors\nalgorithm::Symbol = :kdtree : one of (:kdtree, :brutetree, :balltree)\nmetric::Metric = Euclidean() : any Metric from Distances.jl for the distance between points. For algorithm = :kdtree only metrics which are instances of Union{Distances.Chebyshev, Distances.Cityblock, Distances.Euclidean, Distances.Minkowski, Distances.WeightedCityblock, Distances.WeightedEuclidean, Distances.WeightedMinkowski} are supported.\nleafsize::Int = algorithm == 10 : determines the number of points at which to stop splitting the tree. This option is ignored and always taken as 0 for algorithm = :brutetree, since brutetree isn't actually a tree.\nreorder::Bool = true : if true then points which are close in distance are placed close in memory. In this case, a copy of the original data will be made so that the original data is left unmodified. Setting this to true can significantly improve performance of the specified algorithm (except :brutetree). This option is ignored and always taken as false for algorithm = :brutetree.\nweights::KNNKernel=Uniform() : kernel used in assigning weights to the k-nearest neighbors for each observation. An instance of one of the types in list_kernels(). User-defined weighting functions can be passed by wrapping the function in a UserDefinedKernel kernel (do ?NearestNeighborModels.UserDefinedKernel for more info). 
If observation weights w are passed during machine construction then the weight assigned to each neighbor vote is the product of the kernel generated weight for that neighbor and the corresponding observation weight.","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/#Operations","page":"KNNClassifier","title":"Operations","text":"","category":"section"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"predict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above. Predictions are probabilistic but uncalibrated.\npredict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/#Fitted-parameters","page":"KNNClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"tree: An instance of either KDTree, BruteTree or BallTree depending on the value of the algorithm hyperparameter (See hyper-parameters section above). These are data structures that stores the training data with the view of making quicker nearest neighbor searches on test data points.","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/#Examples","page":"KNNClassifier","title":"Examples","text":"","category":"section"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"using MLJ\nKNNClassifier = @load KNNClassifier pkg=NearestNeighborModels\nX, y = @load_crabs; ## a table and a vector from the crabs dataset\n## view possible kernels\nNearestNeighborModels.list_kernels()\n## KNNClassifier instantiation\nmodel = KNNClassifier(weights = NearestNeighborModels.Inverse())\nmach = machine(model, X, y) |> fit! ## wrap model and required data in an MLJ machine and fit\ny_hat = predict(mach, X)\nlabels = predict_mode(mach, X)\n","category":"page"},{"location":"models/KNNClassifier_NearestNeighborModels/","page":"KNNClassifier","title":"KNNClassifier","text":"See also MultitargetKNNClassifier","category":"page"},{"location":"models/KMedoids_Clustering/#KMedoids_Clustering","page":"KMedoids","title":"KMedoids","text":"","category":"section"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"KMedoids","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"A model type for constructing a K-medoids clusterer, based on Clustering.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"KMedoids = @load KMedoids pkg=Clustering","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"Do model = KMedoids() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in KMedoids(k=...).","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"K-medoids is a clustering algorithm that works by finding k data points (called medoids) such that the total distance between each data point and the closest medoid is minimal.","category":"page"},{"location":"models/KMedoids_Clustering/#Training-data","page":"KMedoids","title":"Training data","text":"","category":"section"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"mach = machine(model, X)","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"Here:","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X)","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/KMedoids_Clustering/#Hyper-parameters","page":"KMedoids","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"k=3: The number of centroids to use in clustering.\nmetric::SemiMetric=Distances.SqEuclidean: The metric used to calculate the clustering. Must have type PreMetric from Distances.jl.\ninit (defaults to :kmpp): how medoids should be initialized, could be one of the following:\n:kmpp: KMeans++\n:kmenc: K-medoids initialization based on centrality\n:rand: random\nan instance of Clustering.SeedingAlgorithm from Clustering.jl\nan integer vector of length k that provides the indices of points to use as initial medoids.\nSee documentation of Clustering.jl.","category":"page"},{"location":"models/KMedoids_Clustering/#Operations","page":"KMedoids","title":"Operations","text":"","category":"section"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"predict(mach, Xnew): return cluster label assignments, given new features Xnew having the same Scitype as X above.\ntransform(mach, Xnew): instead return the mean pairwise distances from new samples to the cluster centers.","category":"page"},{"location":"models/KMedoids_Clustering/#Fitted-parameters","page":"KMedoids","title":"Fitted parameters","text":"","category":"section"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"medoids: The coordinates of the cluster medoids.","category":"page"},{"location":"models/KMedoids_Clustering/#Report","page":"KMedoids","title":"Report","text":"","category":"section"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"The fields of report(mach) are:","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"assignments: The cluster assignments of each point in the training data.\ncluster_labels: The labels assigned to each 
cluster.","category":"page"},{"location":"models/KMedoids_Clustering/#Examples","page":"KMedoids","title":"Examples","text":"","category":"section"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"using MLJ\nKMedoids = @load KMedoids pkg=Clustering\n\ntable = load_iris()\ny, X = unpack(table, ==(:target), rng=123)\nmodel = KMedoids(k=3)\nmach = machine(model, X) |> fit!\n\nyhat = predict(mach, X)\n@assert yhat == report(mach).assignments\n\ncompare = zip(yhat, y) |> collect;\ncompare[1:8] ## clusters align with classes\n\ncenter_dists = transform(mach, fitted_params(mach).medoids')\n\n@assert center_dists[1][1] == 0.0\n@assert center_dists[2][2] == 0.0\n@assert center_dists[3][3] == 0.0","category":"page"},{"location":"models/KMedoids_Clustering/","page":"KMedoids","title":"KMedoids","text":"See also KMeans","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/#RandomWalkOversampler_Imbalance","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"","category":"section"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"Initiate a RandomWalkOversampler model with the given hyper-parameters.","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"RandomWalkOversampler","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"A model type for constructing a random walk oversampler, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"RandomWalkOversampler = @load RandomWalkOversampler pkg=Imbalance","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"Do model = RandomWalkOversampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomWalkOversampler(ratios=...).","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"RandomWalkOversampler implements the random walk oversampling algorithm to correct for class imbalance as in Zhang, H., & Li, M. (2014). RWO-Sampling: A random walk over-sampling approach to imbalanced data classification. 
Information Fusion, 25, 4-20.","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/#Training-data","page":"RandomWalkOversampler","title":"Training data","text":"","category":"section"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"In MLJ or MLJBase, wrap the model in a machine by","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"mach = machine(model)","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"Likewise, there is no need to fit!(mach).","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"For default values of the hyper-parameters, model can be constructed by","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"model = RandomWalkOversampler()","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/#Hyperparameters","page":"RandomWalkOversampler","title":"Hyperparameters","text":"","category":"section"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"ratios=1.0: A parameter that controls the amount of oversampling to be done for each class\nCan be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class\nCan be a dictionary mapping each class label to the float ratio for that class\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister`.","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/#Transform-Inputs","page":"RandomWalkOversampler","title":"Transform Inputs","text":"","category":"section"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"X: A table with element scitypes that subtype Union{Finite, Infinite}. 
Elements in nominal columns should subtype Finite (i.e., have scitype OrderedFactor or Multiclass) and","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":" elements in continuous columns should subtype `Infinite` (i.e., have \n [scitype](https://juliaai.github.io/ScientificTypes.jl/) `Count` or `Continuous`).","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"y: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/#Transform-Outputs","page":"RandomWalkOversampler","title":"Transform Outputs","text":"","category":"section"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"Xover: A matrix or table that includes original data and the new observations due to oversampling. depending on whether the input X is a matrix or table respectively\nyover: An abstract vector of labels corresponding to Xover","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/#Operations","page":"RandomWalkOversampler","title":"Operations","text":"","category":"section"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"transform(mach, X, y): resample the data X and y using RandomWalkOversampler, returning both the new and original observations","category":"page"},{"location":"models/RandomWalkOversampler_Imbalance/#Example","page":"RandomWalkOversampler","title":"Example","text":"","category":"section"},{"location":"models/RandomWalkOversampler_Imbalance/","page":"RandomWalkOversampler","title":"RandomWalkOversampler","text":"using MLJ\nusing ScientificTypes\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows = 100\nnum_continuous_feats = 3\n## want two categorical features with three and two possible values respectively\nnum_vals_per_category = [3, 2]\n\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n class_probs, num_vals_per_category, rng=42) \njulia> Imbalance.checkbalance(y)\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) \n\n\njulia> ScientificTypes.schema(X).scitypes\n(Continuous, Continuous, Continuous, Continuous, Continuous)\n## coerce nominal columns to a finite scitype (multiclass or ordered factor)\nX = coerce(X, :Column4=>Multiclass, :Column5=>Multiclass)\n\n## load RandomWalkOversampler model type:\nRandomWalkOversampler = @load RandomWalkOversampler pkg=Imbalance\n\n## oversample the minority classes to sizes relative to the majority class:\noversampler = RandomWalkOversampler(ratios = Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng = 42)\nmach = machine(oversampler)\nXover, yover = transform(mach, X, y)\n\njulia> Imbalance.checkbalance(yover)\n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 
(100.0%)","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#EvoTreeRegressor_EvoTrees","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"EvoTreeRegressor(;kwargs...)","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"A model type for constructing a EvoTreeRegressor, based on EvoTrees.jl, and implementing both an internal API and the MLJ model interface.","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#Hyper-parameters","page":"EvoTreeRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"loss=:mse: Loss to be be minimized during training. One of:\n:mse\n:logloss\n:gamma\n:tweedie\n:quantile\n:l1\nnrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.\neta=0.1: Learning rate. Each tree raw predictions are scaled by eta prior to be added to the stack of predictions. Must be > 0. A lower eta results in slower learning, requiring a higher nrounds but typically improves model performance.\nL2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.\nlambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. Higher lambda can result in a more robust model.\ngamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model. Must be >= 0.\nalpha::T=0.5: Loss specific parameter in the [0, 1] range: - :quantile: target quantile for the regression. - :l1: weighting parameters to positive vs negative residuals. - Positive residual weights = alpha - Negative residual weights = (1 - alpha)\nmax_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.\nmin_weight=1.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.\nrowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be in ]0, 1].\ncolsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be in ]0, 1].\nnbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. Should be between 2 and 255.\nmonotone_constraints=Dict{Int, Int}(): Specify monotonic constraints using a dict where the key is the feature index and the value the applicable constraint (-1=decreasing, 0=none, 1=increasing). Only :linear, :logistic, :gamma and tweedie losses are supported at the moment.\ntree_type=\"binary\" Tree structure to be used. One of:\nbinary: Each node of a tree is grown independently. 
Trees are built depthwise until the maximum depth is reached, or until the minimum weight or gain condition (see gamma) stops further node splits.\noblivious: A common splitting condition is imposed on all nodes of a given depth.\nrng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#Internal-API","page":"EvoTreeRegressor","title":"Internal API","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"Do config = EvoTreeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeRegressor(loss=...).","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#Training-model","page":"EvoTreeRegressor","title":"Training model","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"A model is built using fit_evotree:","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"model = fit_evotree(config; x_train, y_train, kwargs...)","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#Inference","page":"EvoTreeRegressor","title":"Inference","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"Predictions are obtained using predict, which returns a Vector of length nobs:","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"EvoTrees.predict(model, X)","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"Alternatively, models act as functors, returning predictions when called as a function with features as argument:","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"model(X)","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#MLJ-Interface","page":"EvoTreeRegressor","title":"MLJ Interface","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"From MLJ, the type can be imported using:","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"EvoTreeRegressor = @load EvoTreeRegressor pkg=EvoTrees","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"Do model = EvoTreeRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeRegressor(loss=...).","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#Training-model-2","page":"EvoTreeRegressor","title":"Training model","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#Operations","page":"EvoTreeRegressor","title":"Operations","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. Predictions are deterministic.","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#Fitted-parameters","page":"EvoTreeRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":":fitresult: The GBTree object returned by EvoTrees.jl fitting algorithm.","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#Report","page":"EvoTreeRegressor","title":"Report","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"The fields of report(mach) are:","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":":features: The names of the features encountered in training.","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/#Examples","page":"EvoTreeRegressor","title":"Examples","text":"","category":"section"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"## Internal API\nusing EvoTrees\nconfig = EvoTreeRegressor(max_depth=5, nbins=32, nrounds=100)\nnobs, nfeats = 1_000, 5\nx_train, y_train = randn(nobs, nfeats), rand(nobs)\nmodel = fit_evotree(config; x_train, y_train)\npreds = EvoTrees.predict(model, x_train)","category":"page"},{"location":"models/EvoTreeRegressor_EvoTrees/","page":"EvoTreeRegressor","title":"EvoTreeRegressor","text":"## MLJ Interface\nusing MLJ\nEvoTreeRegressor = @load EvoTreeRegressor pkg=EvoTrees\nmodel = EvoTreeRegressor(max_depth=5, nbins=32, nrounds=100)\nX, y = @load_boston\nmach = machine(model, X, y) |> fit!\npreds = predict(mach, 
X)","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/#KNNDetector_OutlierDetectionNeighbors","page":"KNNDetector","title":"KNNDetector","text":"","category":"section"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"KNNDetector(k=5,\n metric=Euclidean,\n algorithm=:kdtree,\n leafsize=10,\n reorder=true,\n reduction=:maximum)","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"Calculate the anomaly score of an instance based on the distance to its k-nearest neighbors.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/#Parameters","page":"KNNDetector","title":"Parameters","text":"","category":"section"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"k::Integer","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"Number of neighbors (must be greater than 0).","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"metric::Metric","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"This is one of the Metric types defined in the Distances.jl package. It is possible to define your own metrics by creating new types that are subtypes of Metric.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"algorithm::Symbol","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"static::Union{Bool, Symbol}","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"leafsize::Int","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"Determines at what number of points to stop splitting the tree further. 
There is a trade-off between traversing the tree and having to evaluate the metric function for increasing number of points.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"reorder::Bool","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"While building the tree this will put points close in distance close in memory since this helps with cache locality. In this case, a copy of the original data will be made so that the original data is left unmodified. This can have a significant impact on performance and is by default set to true.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"parallel::Bool","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. Note: fit is not parallel.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"reduction::Symbol","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"One of (:maximum, :median, :mean). (reduction=:maximum) was proposed by [1]. Angiulli et al. [2] proposed sum to reduce the distances, but mean has been implemented for numerical stability.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/#Examples","page":"KNNDetector","title":"Examples","text":"","category":"section"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"using OutlierDetection: KNNDetector, fit, transform\ndetector = KNNDetector()\nX = rand(10, 100)\nmodel, result = fit(detector, X; verbosity=0)\ntest_scores = transform(detector, model, X)","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/#References","page":"KNNDetector","title":"References","text":"","category":"section"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"[1] Ramaswamy, Sridhar; Rastogi, Rajeev; Shim, Kyuseok (2000): Efficient Algorithms for Mining Outliers from Large Data Sets.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionNeighbors/","page":"KNNDetector","title":"KNNDetector","text":"[2] Angiulli, Fabrizio; Pizzuti, Clara (2002): Fast Outlier Detection in High Dimensional Spaces.","category":"page"},{"location":"models/RANSACRegressor_MLJScikitLearnInterface/#RANSACRegressor_MLJScikitLearnInterface","page":"RANSACRegressor","title":"RANSACRegressor","text":"","category":"section"},{"location":"models/RANSACRegressor_MLJScikitLearnInterface/","page":"RANSACRegressor","title":"RANSACRegressor","text":"RANSACRegressor","category":"page"},{"location":"models/RANSACRegressor_MLJScikitLearnInterface/","page":"RANSACRegressor","title":"RANSACRegressor","text":"A model type for constructing a ransac regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RANSACRegressor_MLJScikitLearnInterface/","page":"RANSACRegressor","title":"RANSACRegressor","text":"From MLJ, the type can be imported 
using","category":"page"},{"location":"models/RANSACRegressor_MLJScikitLearnInterface/","page":"RANSACRegressor","title":"RANSACRegressor","text":"RANSACRegressor = @load RANSACRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/RANSACRegressor_MLJScikitLearnInterface/","page":"RANSACRegressor","title":"RANSACRegressor","text":"Do model = RANSACRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RANSACRegressor(estimator=...).","category":"page"},{"location":"models/RANSACRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"RANSACRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/RANSACRegressor_MLJScikitLearnInterface/","page":"RANSACRegressor","title":"RANSACRegressor","text":"estimator = nothing\nmin_samples = 5\nresidual_threshold = nothing\nis_data_valid = nothing\nis_model_valid = nothing\nmax_trials = 100\nmax_skips = 9223372036854775807\nstop_n_inliers = 9223372036854775807\nstop_score = Inf\nstop_probability = 0.99\nloss = absolute_error\nrandom_state = nothing","category":"page"},{"location":"models/NuSVR_LIBSVM/#NuSVR_LIBSVM","page":"NuSVR","title":"NuSVR","text":"","category":"section"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"NuSVR","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"A model type for constructing a ν-support vector regressor, based on LIBSVM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"NuSVR = @load NuSVR pkg=LIBSVM","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"Do model = NuSVR() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in NuSVR(kernel=...).","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): \"LIBSVM: a library for support vector machines.\" ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf. 
","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"This model is a re-parameterization of EpsilonSVR in which the epsilon hyper-parameter is replaced with a new parameter nu (denoted ν in the cited reference) which attempts to control the number of support vectors directly.","category":"page"},{"location":"models/NuSVR_LIBSVM/#Training-data","page":"NuSVR","title":"Training data","text":"","category":"section"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"In MLJ or MLJBase, bind an instance model to data with:","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"where","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/NuSVR_LIBSVM/#Hyper-parameters","page":"NuSVR","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"kernel=LIBSVM.Kernel.RadialBasis: either an object that can be\ncalled, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see \"Examples\" below).\nLIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2\nLIBSVM.Kernel.Polynomial: (x1, x2) -> gamma*x1'*x2 + coef0)^degree\nLIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))\nLIBSVM.Kernel.Sigmoid: (x1, x2) - > tanh(gamma*x1'*x2 + coef0)\nHere gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIVSVM.jl issue91\ngamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).\ncoef0 = 0.0: kernel parameter (see above)\ndegree::Int32 = Int32(3): degree in polynomial kernel (see above)\ncost=1.0 (range (0, Inf)): the parameter denoted C in the cited reference; for greater regularization, decrease cost\nnu=0.5 (range (0, 1]): An upper bound on the fraction of training errors and a lower bound of the fraction of support vectors. Denoted ν in the cited paper. 
Changing nu changes the thickness of some neighborhood of the graph of the prediction function (\"tube\" or \"slab\") and a training error is said to occur when a data point (x, y) lies outside of that neighborhood.\ncachesize=200.0 cache memory size in MB\ntolerance=0.001: tolerance for the stopping criterion\nshrinking=true: whether to use shrinking heuristics","category":"page"},{"location":"models/NuSVR_LIBSVM/#Operations","page":"NuSVR","title":"Operations","text":"","category":"section"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/NuSVR_LIBSVM/#Fitted-parameters","page":"NuSVR","title":"Fitted parameters","text":"","category":"section"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"libsvm_model: the trained model object created by the LIBSVM.jl package","category":"page"},{"location":"models/NuSVR_LIBSVM/#Report","page":"NuSVR","title":"Report","text":"","category":"section"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"The fields of report(mach) are:","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"gamma: actual value of the kernel parameter gamma used in training","category":"page"},{"location":"models/NuSVR_LIBSVM/#Examples","page":"NuSVR","title":"Examples","text":"","category":"section"},{"location":"models/NuSVR_LIBSVM/#Using-a-built-in-kernel","page":"NuSVR","title":"Using a built-in kernel","text":"","category":"section"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"using MLJ\nimport LIBSVM\n\nNuSVR = @load NuSVR pkg=LIBSVM ## model type\nmodel = NuSVR(kernel=LIBSVM.Kernel.Polynomial) ## instance\n\nX, y = make_regression(rng=123) ## table, vector\nmach = machine(model, X, y) |> fit!\n\nXnew, _ = make_regression(3, rng=123)\n\njulia> yhat = predict(mach, Xnew)\n3-element Vector{Float64}:\n 0.2008156459920009\n 0.1131520519131709\n -0.2076156254934889","category":"page"},{"location":"models/NuSVR_LIBSVM/#User-defined-kernels","page":"NuSVR","title":"User-defined kernels","text":"","category":"section"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`\nmodel = NuSVR(kernel=k)\nmach = machine(model, X, y) |> fit!\n\njulia> yhat = predict(mach, Xnew)\n3-element Vector{Float64}:\n 1.1211558175964662\n 0.06677125944808422\n -0.6817578942749346","category":"page"},{"location":"models/NuSVR_LIBSVM/","page":"NuSVR","title":"NuSVR","text":"See also EpsilonSVR, LIVSVM.jl and the original C implementation documentation.","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/#CatBoostClassifier_CatBoost","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"","category":"section"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"CatBoostClassifier","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"A model type for constructing a CatBoost classifier, based on CatBoost.jl, and implementing the MLJ model 
interface.","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"CatBoostClassifier = @load CatBoostClassifier pkg=CatBoost","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"Do model = CatBoostClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in CatBoostClassifier(iterations=...).","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/#Training-data","page":"CatBoostClassifier","title":"Training data","text":"","category":"section"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"where","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, Finite, Textual; check column scitypes with schema(X). Textual columns will be passed to catboost as text_features, Multiclass columns will be passed to catboost as cat_features, and OrderedFactor columns will be converted to integers.\ny: the target, which can be any AbstractVector whose element scitype is Finite; check the scitype with scitype(y)","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/#Hyper-parameters","page":"CatBoostClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"More details on the catboost hyperparameters, here are the Python docs: https://catboost.ai/en/docs/concepts/python-reference_catboostclassifier#parameters","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/#Operations","page":"CatBoostClassifier","title":"Operations","text":"","category":"section"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"predict(mach, Xnew): probabilistic predictions of the target given new features Xnew having the same scitype as X above.\npredict_mode(mach, Xnew): returns the mode of each of the prediction above.","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/#Accessor-functions","page":"CatBoostClassifier","title":"Accessor functions","text":"","category":"section"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"feature_importances(mach): return vector of feature importances, in the form of feature::Symbol => importance::Real 
pairs","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/#Fitted-parameters","page":"CatBoostClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"model: The Python CatBoostClassifier model","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/#Report","page":"CatBoostClassifier","title":"Report","text":"","category":"section"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"The fields of report(mach) are:","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"feature_importances: Vector{Pair{Symbol, Float64}} of feature importances","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/#Examples","page":"CatBoostClassifier","title":"Examples","text":"","category":"section"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"using CatBoost.MLJCatBoostInterface\nusing MLJ\n\nX = (\n duration = [1.5, 4.1, 5.0, 6.7], \n n_phone_calls = [4, 5, 6, 7], \n department = coerce([\"acc\", \"ops\", \"acc\", \"ops\"], Multiclass), \n)\ny = coerce([0, 0, 1, 1], Multiclass)\n\nmodel = CatBoostClassifier(iterations=5)\nmach = machine(model, X, y)\nfit!(mach)\nprobs = predict(mach, X)\npreds = predict_mode(mach, X)","category":"page"},{"location":"models/CatBoostClassifier_CatBoost/","page":"CatBoostClassifier","title":"CatBoostClassifier","text":"See also catboost and the unwrapped model type CatBoost.CatBoostClassifier.","category":"page"},{"location":"getting_started/#Getting-Started","page":"Getting Started","title":"Getting Started","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"For an outline of MLJ's goals and features, see About MLJ.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"This page introduces some MLJ basics, assuming some familiarity with machine learning. For a complete list of other MLJ learning resources, see Learning MLJ. ","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"MLJ collects together the functionality provided by mutliple packages. To learn how to install components separately, run using MLJ; @doc MLJ.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"This section introduces only the most basic MLJ operations and concepts. It assumes MLJ has been successfully installed. See Installation if this is not the case. 
","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"import Random.seed!\nusing MLJ\nusing InteractiveUtils\nMLJ.color_off()\nseed!(1234)","category":"page"},{"location":"getting_started/#Choosing-and-evaluating-a-model","page":"Getting Started","title":"Choosing and evaluating a model","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"The following code loads Fisher's famous iris data set as a named tuple of column vectors:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"using MLJ\niris = load_iris();\nselectrows(iris, 1:3) |> pretty\nschema(iris)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Because this data format is compatible with Tables.jl (and satisfies Tables.istable(iris) == true) many MLJ methods (such as selectrows, pretty and schema used above) as well as many MLJ models can work with it. However, as most new users are already familiar with the access methods particular to DataFrames (also compatible with Tables.jl) we'll put our data into that format here:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"import DataFrames\niris = DataFrames.DataFrame(iris);\nnothing # hide","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Next, let's split the data \"horizontally\" into input and target parts, and specify an RNG seed, to force observations to be shuffled:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"y, X = unpack(iris, ==(:target); rng=123);\nfirst(X, 3) |> pretty","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"This call to unpack splits off any column with name == to :target into something called y, and all the remaining columns into X.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"To list all models available in MLJ's model registry do models(). Listing the models compatible with the present data:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"models(matching(X,y))","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"In MLJ a model is a struct storing the hyperparameters of the learning algorithm indicated by the struct name (and nothing else). 
For common problems matching data to models, see Model Search and Preparing Data.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"To see the documentation for DecisionTreeClassifier (without loading its defining code) do","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"doc(\"DecisionTreeClassifier\", pkg=\"DecisionTree\")","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Assuming the MLJDecisionTreeInterface.jl package is in your load path (see Installation) we can use @load to import the DecisionTreeClassifier model type, which we will bind to Tree:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Tree = @load DecisionTreeClassifier pkg=DecisionTree","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"(In this case, we need to specify pkg=... because multiple packages provide a model type with the name DecisionTreeClassifier.) Now we can instantiate a model with default hyperparameters:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"tree = Tree()","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Important: DecisionTree.jl and most other packages implementing machine learning algorithms for use in MLJ are not MLJ dependencies. If such a package is not in your load path you will receive an error explaining how to add the package to your current environment. Alternatively, you can use the interactive macro @iload. For more on importing model types, see Loading Model Code.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Once instantiated, a model's performance can be evaluated with the evaluate method. Our classifier is a probabilistic predictor (check prediction_type(tree) == :probabilistic) which means we can specify a probabilistic measure (metric) like log_loss, as well as deterministic measures like accuracy (which are applied after computing the mode of each prediction):","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"evaluate(tree, X, y,\n         resampling=CV(shuffle=true),\n         measures=[log_loss, accuracy],\n         verbosity=0)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Under the hood, evaluate calls lower level functions predict or predict_mode according to the type of measure, as shown in the output. 
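The object returned by evaluate can also be queried programmatically. A hedged sketch follows; the field names are those provided by MLJ's performance-evaluation object, but treat the details as illustrative:

```julia
e = evaluate(tree, X, y,
             resampling=CV(shuffle=true),
             measures=[log_loss, accuracy],
             verbosity=0)
e.measurement    # aggregated score for each measure, in the order given
e.per_fold       # one vector of per-fold scores for each measure
```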
We shall call these operations directly below.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"For more on performance evaluation, see Evaluating Model Performance.","category":"page"},{"location":"getting_started/#A-preview-of-data-type-specification-in-MLJ","page":"Getting Started","title":"A preview of data type specification in MLJ","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"The target y above is a categorical vector, which is appropriate because our model is a decision tree classifier:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"typeof(y)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"However, MLJ models do not prescribe the machine types for the data they operate on. Rather, they specify a scientific type, which refers to the way data is to be interpreted, as opposed to how it is encoded:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"target_scitype(tree)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Here Finite is an example of a \"scalar\" scientific type with two subtypes:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"subtypes(Finite)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"We use the scitype function to check how MLJ is going to interpret given data. Our choice of encoding for y works for DecisionTreeClassifier, because we have:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"scitype(y)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"and Multiclass{3} <: Finite. If we encode with integers instead, we obtain:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"yint = int.(y);\nscitype(yint)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"and using yint in place of y in classification problems will fail. 
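A hedged sketch of the repair, using the coerce tool discussed under Scalar scientific types below:

```julia
# Re-interpret the integer codes as categorical data so the classifier accepts them.
yfixed = coerce(yint, Multiclass)   # a CategoricalVector again
scitype(yfixed)                     # expected: AbstractVector{Multiclass{3}}
```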
See also Working with Categorical Data.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"For more on scientific types, see Data containers and scientific types below.","category":"page"},{"location":"getting_started/#Fit-and-predict","page":"Getting Started","title":"Fit and predict","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"To illustrate MLJ's fit and predict interface, let's perform our performance evaluations by hand, but using a simple holdout set, instead of cross-validation.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Wrapping the model in data creates a machine which will store training outcomes:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"mach = machine(tree, X, y)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Training and testing on a hold-out set:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"train, test = partition(eachindex(y), 0.7); # 70:30 split\nfit!(mach, rows=train);\nyhat = predict(mach, X[test,:]);\nyhat[3:5]\nlog_loss(yhat, y[test])","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Note that log_loss and cross_entropy are aliases for LogLoss() (which can be passed an optional keyword parameter, as in LogLoss(tol=0.001)). For a list of all losses and scores, and their aliases, run measures().","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Notice that yhat is a vector of Distribution objects, because DecisionTreeClassifier makes probabilistic predictions. 
The methods of the Distributions.jl package can be applied to such distributions:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"broadcast(pdf, yhat[3:5], \"virginica\") # predicted probabilities of virginica\nbroadcast(pdf, yhat, y[test])[3:5] # predicted probability of observed class\nmode.(yhat[3:5])","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Or, one can explicitly get modes by using predict_mode instead of predict:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"predict_mode(mach, X[test[3:5],:])","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Finally, we note that pdf() is overloaded to allow the retrieval of probabilities for all levels at once:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"L = levels(y)\npdf(yhat[3:5], L)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Unsupervised models have a transform method instead of predict, and may optionally implement an inverse_transform method:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"v = Float64[1, 2, 3, 4]\nstand = Standardizer() # this type is built-in\nmach2 = machine(stand, v)\nfit!(mach2)\nw = transform(mach2, v)\ninverse_transform(mach2, w)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Machines have an internal state which allows them to avoid redundant calculations when retrained, in certain conditions - for example when increasing the number of trees in a random forest, or the number of epochs in a neural network. The machine-building syntax also anticipates a more general syntax for composing multiple models, an advanced feature explained in Learning Networks.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"There is a version of evaluate for machines as well as models. This time we'll use a simple holdout strategy as above. (An exclamation point is added to the method name because machines are generally mutated when trained.)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"evaluate!(mach, resampling=Holdout(fraction_train=0.7),\n measures=[log_loss, accuracy],\n verbosity=0)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Changing a hyperparameter and re-evaluating:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"tree.max_depth = 3\nevaluate!(mach, resampling=Holdout(fraction_train=0.7),\n measures=[log_loss, accuracy],\n verbosity=0)","category":"page"},{"location":"getting_started/#Next-steps","page":"Getting Started","title":"Next steps","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"For next steps, consult the Learning MLJ section. 
At the least, we recommend you read the remainder of this page before considering serious use of MLJ.","category":"page"},{"location":"getting_started/#Data-containers-and-scientific-types","page":"Getting Started","title":"Data containers and scientific types","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"The MLJ user should acquaint themselves with some basic assumptions about the form of data expected by MLJ, as outlined below. The basic machine constructors look like this (see also Constructing machines):","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"machine(model::Unsupervised, X)\nmachine(model::Supervised, X, y)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Each supervised model in MLJ declares the permitted scientific type of the inputs X and targets y that can be bound to it in the second constructor above, rather than specifying specific machine types (such as Array{Float32, 2}). Similar remarks apply to the input X of an unsupervised model.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Scientific types are julia types defined in the package ScientificTypesBase.jl; the package ScientificTypes.jl implements the particular convention used in the MLJ universe for assigning a specific scientific type (interpretation) to each julia object (see the scitype examples below).","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"The basic \"scalar\" scientific types are Continuous, Multiclass{N}, OrderedFactor{N}, Count and Textual. Missing and Nothing are also considered scientific types. Be sure you read Scalar scientific types below to guarantee your scalar data is interpreted correctly. Tools exist to coerce the data to have the appropriate scientific type; see ScientificTypes.jl or run ?coerce for details.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Additionally, most data containers - such as tuples, vectors, matrices and tables - have a scientific type parameterized by scitype of the elements they contain.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"(Image: )","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Figure 1. Part of the scientific type hierarchy in ScientificTypesBase.jl.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"scitype(4.6)\nscitype(42)\nx1 = coerce([\"yes\", \"no\", \"yes\", \"maybe\"], Multiclass);\nscitype(x1)\nX = (x1=x1, x2=rand(4), x3=rand(4)) # a \"column table\"\nscitype(X)","category":"page"},{"location":"getting_started/#Two-dimensional-data","page":"Getting Started","title":"Two-dimensional data","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Generally, two-dimensional data in MLJ is expected to be tabular. 
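As a hedged illustration of this tabular requirement, several everyday containers already satisfy the Tables.jl contract (assuming Tables.jl and DataFrames.jl are available, as in the walkthrough above):

```julia
import Tables
import DataFrames

Tables.istable(DataFrames.DataFrame(x=[1, 2], y=[3, 4]))   # true
Tables.istable((x=[1, 2], y=[3, 4]))                        # true: a "column table"
Tables.istable([(x=1, y=3), (x=2, y=4)])                    # true: a "row table"
```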
All data containers X compatible with the Tables.jl interface and satisfying Tables.istable(X) == true (most of the formats in this list) have the scientific type Table{K}, where K depends on the scientific types of the columns, which can be individually inspected using schema:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"schema(X)","category":"page"},{"location":"getting_started/#Matrix-data","page":"Getting Started","title":"Matrix data","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"MLJ models expecting a table do not generally accept a matrix instead. However, a matrix can be wrapped as a table, using MLJ.table:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"matrix_table = MLJ.table(rand(2,3));\nschema(matrix_table)","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"The matrix is not copied, only wrapped. To manifest a table as a matrix, use MLJ.matrix.","category":"page"},{"location":"getting_started/#Observations-correspond-to-rows,-not-columns","page":"Getting Started","title":"Observations correspond to rows, not columns","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"When supplying models with matrices, or wrapping them in tables, each row should correspond to a different observation. That is, the matrix should be n x p, where n is the number of observations and p the number of features. However, some models may perform better if supplied the adjoint of a p x n matrix instead, and observation resampling is always more efficient in this case.","category":"page"},{"location":"getting_started/#Inputs","page":"Getting Started","title":"Inputs","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Since an MLJ model only specifies the scientific type of data, if that type is Table - which is the case for the majority of MLJ models - then any Tables.jl container X is permitted, so long as Tables.istable(X) == true.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Specifically, the requirement for an arbitrary model's input is scitype(X) <: input_scitype(model).","category":"page"},{"location":"getting_started/#Targets","page":"Getting Started","title":"Targets","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"The target y expected by MLJ models is generally an AbstractVector. 
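A hedged sketch, reusing the names from the iris walkthrough above, of checking this input requirement (and the analogous target requirement stated next) by hand:

```julia
scitype(X) <: input_scitype(tree)     # expected true: a table of Continuous columns
scitype(y) <: target_scitype(tree)    # expected true: element scitype Multiclass{3}
```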
A multivariate target y will generally be a table.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Specifically, the type requirement for a model target is scitype(y) <: target_scitype(model).","category":"page"},{"location":"getting_started/#Querying-a-model-for-acceptable-data-types","page":"Getting Started","title":"Querying a model for acceptable data types","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Given a model instance, one can inspect the admissible scientific types of its input and target, and without loading the code defining the model;","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"tree = @load DecisionTreeClassifier pkg=DecisionTree","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"i = info(\"DecisionTreeClassifier\", pkg=\"DecisionTree\")\ni.input_scitype\ni.target_scitype","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"This output indicates that any table with Continuous, Count or OrderedFactor columns is acceptable as the input X, and that any vector with element scitype <: Finite is acceptable as the target y.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"For more on matching models to data, see Model Search.","category":"page"},{"location":"getting_started/#Scalar-scientific-types","page":"Getting Started","title":"Scalar scientific types","text":"","category":"section"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Models in MLJ will always apply the MLJ convention described in ScientificTypes.jl to decide how to interpret the elements of your container types. Here are the key features of that convention:","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Any AbstractFloat is interpreted as Continuous.\nAny Integer is interpreted as Count.\nAny CategoricalValue x, is interpreted as Multiclass or OrderedFactor, depending on the value of isordered(x).\nStrings and Chars are not interpreted as Multiclass or OrderedFactor (they have scitypes Textual and Unknown respectively).\nIn particular, integers (including Bools) cannot be used to represent categorical data. Use the preceding coerce operations to coerce to a Finite scitype.\nThe scientific types of nothing and missing are Nothing and Missing, native types we also regard as scientific.","category":"page"},{"location":"getting_started/","page":"Getting Started","title":"Getting Started","text":"Use coerce(v, OrderedFactor) or coerce(v, Multiclass) to coerce a vector v of integers, strings or characters to a vector with an appropriate Finite (categorical) scitype. See also Working with Categorical Data, and the ScientificTypes.jl documentation.","category":"page"},{"location":"transformers/#Transformers-and-Other-Unsupervised-Models","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised Models","text":"","category":"section"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"Several unsupervised models used to perform common transformations, such as one-hot encoding, are available in MLJ out-of-the-box. 
These are detailed in Built-in transformers below.","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"A transformer is static if it has no learned parameters. While such a transformer is tantamount to an ordinary function, realizing it as an MLJ static transformer (a subtype of Static <: Unsupervised) can be useful, especially if the function depends on parameters the user would like to manipulate (which become hyper-parameters of the model). The necessary syntax for defining your own static transformers is described in Static transformers below.","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"Some unsupervised models, such as clustering algorithms, have a predict method in addition to a transform method. We give an example of this in Transformers that also predict","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"Finally, we note that models that fit a distribution, or more generally a sampler object, to some data, which are sometimes viewed as unsupervised, are treated in MLJ as supervised models. See Models that learn a probability distribution for an example.","category":"page"},{"location":"transformers/#Built-in-transformers","page":"Transformers and Other Unsupervised models","title":"Built-in transformers","text":"","category":"section"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"MLJModels.Standardizer\nMLJModels.OneHotEncoder\nMLJModels.ContinuousEncoder\nMLJModels.FillImputer\nMLJModels.UnivariateFillImputer\nMLJModels.FeatureSelector\nMLJModels.UnivariateBoxCoxTransformer\nMLJModels.UnivariateDiscretizer\nMLJModels.UnivariateTimeTypeToContinuous","category":"page"},{"location":"transformers/#MLJModels.Standardizer","page":"Transformers and Other Unsupervised models","title":"MLJModels.Standardizer","text":"Standardizer\n\nA model type for constructing a standardizer, based on MLJModels.jl, and implementing the MLJ model interface.\n\nFrom MLJ, the type can be imported using\n\nStandardizer = @load Standardizer pkg=MLJModels\n\nDo model = Standardizer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in Standardizer(features=...).\n\nUse this model to standardize (whiten) a Continuous vector, or relevant columns of a table. The rescalings applied by this transformer to new data are always those learned during the training phase, which are generally different from what would actually standardize the new data.\n\nTraining data\n\nIn MLJ or MLJBase, bind an instance model to data with\n\nmach = machine(model, X)\n\nwhere\n\nX: any Tables.jl compatible table or any abstract vector with Continuous element scitype (any abstract float vector). 
Only features in a table with Continuous scitype can be standardized; check column scitypes with schema(X).\n\nTrain the machine using fit!(mach, rows=...).\n\nHyper-parameters\n\nfeatures: one of the following, with the behavior indicated below:\n[] (empty, the default): standardize all features (columns) having Continuous element scitype\nnon-empty vector of feature names (symbols): standardize only the Continuous features in the vector (if ignore=false) or Continuous features not named in the vector (ignore=true).\nfunction or other callable: standardize a feature if the callable returns true on its name. For example, Standardizer(features = name -> name in [:x1, :x3], ignore = true, count=true) has the same effect as Standardizer(features = [:x1, :x3], ignore = true, count=true), namely to standardize all Continuous and Count features, with the exception of :x1 and :x3.\nNote this behavior is further modified if the ordered_factor or count flags are set to true; see below\nignore=false: whether to ignore or standardize specified features, as explained above\nordered_factor=false: if true, standardize any OrderedFactor feature wherever a Continuous feature would be standardized, as described above\ncount=false: if true, standardize any Count feature wherever a Continuous feature would be standardized, as described above\n\nOperations\n\ntransform(mach, Xnew): return Xnew with relevant features standardized according to the rescalings learned during fitting of mach.\ninverse_transform(mach, Z): apply the inverse transformation to Z, so that inverse_transform(mach, transform(mach, Xnew)) is approximately the same as Xnew; unavailable if ordered_factor or count flags were set to true.\n\nFitted parameters\n\nThe fields of fitted_params(mach) are:\n\nfeatures_fit - the names of features that will be standardized\nmeans - the corresponding untransformed mean values\nstds - the corresponding untransformed standard deviations\n\nReport\n\nThe fields of report(mach) are:\n\nfeatures_fit: the names of features that will be standardized\n\nExamples\n\nusing MLJ\n\nX = (ordinal1 = [1, 2, 3],\n ordinal2 = coerce([:x, :y, :x], OrderedFactor),\n ordinal3 = [10.0, 20.0, 30.0],\n ordinal4 = [-20.0, -30.0, -40.0],\n nominal = coerce([\"Your father\", \"he\", \"is\"], Multiclass));\n\njulia> schema(X)\n┌──────────┬──────────────────┐\n│ names │ scitypes │\n├──────────┼──────────────────┤\n│ ordinal1 │ Count │\n│ ordinal2 │ OrderedFactor{2} │\n│ ordinal3 │ Continuous │\n│ ordinal4 │ Continuous │\n│ nominal │ Multiclass{3} │\n└──────────┴──────────────────┘\n\nstand1 = Standardizer();\n\njulia> transform(fit!(machine(stand1, X)), X)\n(ordinal1 = [1, 2, 3],\n ordinal2 = CategoricalValue{Symbol,UInt32}[:x, :y, :x],\n ordinal3 = [-1.0, 0.0, 1.0],\n ordinal4 = [1.0, 0.0, -1.0],\n nominal = CategoricalValue{String,UInt32}[\"Your father\", \"he\", \"is\"],)\n\nstand2 = Standardizer(features=[:ordinal3, ], ignore=true, count=true);\n\njulia> transform(fit!(machine(stand2, X)), X)\n(ordinal1 = [-1.0, 0.0, 1.0],\n ordinal2 = CategoricalValue{Symbol,UInt32}[:x, :y, :x],\n ordinal3 = [10.0, 20.0, 30.0],\n ordinal4 = [1.0, 0.0, -1.0],\n nominal = CategoricalValue{String,UInt32}[\"Your father\", \"he\", \"is\"],)\n\nSee also OneHotEncoder, ContinuousEncoder.\n\n\n\n\n\n","category":"type"},{"location":"transformers/#MLJModels.OneHotEncoder","page":"Transformers and Other Unsupervised models","title":"MLJModels.OneHotEncoder","text":"OneHotEncoder\n\nA model type for constructing a one-hot encoder, based on 
MLJModels.jl, and implementing the MLJ model interface.\n\nFrom MLJ, the type can be imported using\n\nOneHotEncoder = @load OneHotEncoder pkg=MLJModels\n\nDo model = OneHotEncoder() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OneHotEncoder(features=...).\n\nUse this model to one-hot encode the Multiclass and OrderedFactor features (columns) of some table, leaving other columns unchanged.\n\nNew data to be transformed may lack features present in the fit data, but no new features can be present.\n\nWarning: This transformer assumes that levels(col) for any Multiclass or OrderedFactor column, col, is the same for training data and new data to be transformed.\n\nTo ensure all features are transformed into Continuous features, or dropped, use ContinuousEncoder instead.\n\nTraining data\n\nIn MLJ or MLJBase, bind an instance model to data with\n\nmach = machine(model, X)\n\nwhere\n\nX: any Tables.jl compatible table. Columns can be of mixed type but only those with element scitype Multiclass or OrderedFactor can be encoded. Check column scitypes with schema(X).\n\nTrain the machine using fit!(mach, rows=...).\n\nHyper-parameters\n\nfeatures: a vector of symbols (column names). If empty (default) then all Multiclass and OrderedFactor features are encoded. Otherwise, encoding is further restricted to the specified features (ignore=false) or the unspecified features (ignore=true). This default behavior can be modified by the ordered_factor flag.\nordered_factor=false: when true, OrderedFactor features are universally excluded\ndrop_last=true: whether to drop the column corresponding to the final class of encoded features. For example, a three-class feature is spawned into three new features if drop_last=false, but just two features otherwise.\n\nFitted parameters\n\nThe fields of fitted_params(mach) are:\n\nall_features: names of all features encountered in training\nfitted_levels_given_feature: dictionary of the levels associated with each feature encoded, keyed on the feature name\nref_name_pairs_given_feature: dictionary of pairs r => ftr (such as 0x00000001 => :grad__A) where r is a CategoricalArrays.jl reference integer representing a level, and ftr the corresponding new feature name; the dictionary is keyed on the names of features that are encoded\n\nReport\n\nThe fields of report(mach) are:\n\nfeatures_to_be_encoded: names of input features to be encoded\nnew_features: names of all output features\n\nExample\n\nusing MLJ\n\nX = (name=categorical([\"Danesh\", \"Lee\", \"Mary\", \"John\"]),\n grade=categorical([\"A\", \"B\", \"A\", \"C\"], ordered=true),\n height=[1.85, 1.67, 1.5, 1.67],\n n_devices=[3, 2, 4, 3])\n\njulia> schema(X)\n┌───────────┬──────────────────┐\n│ names │ scitypes │\n├───────────┼──────────────────┤\n│ name │ Multiclass{4} │\n│ grade │ OrderedFactor{3} │\n│ height │ Continuous │\n│ n_devices │ Count │\n└───────────┴──────────────────┘\n\nhot = OneHotEncoder(drop_last=true)\nmach = fit!(machine(hot, X))\nW = transform(mach, X)\n\njulia> schema(W)\n┌──────────────┬────────────┐\n│ names │ scitypes │\n├──────────────┼────────────┤\n│ name__Danesh │ Continuous │\n│ name__John │ Continuous │\n│ name__Lee │ Continuous │\n│ grade__A │ Continuous │\n│ grade__B │ Continuous │\n│ height │ Continuous │\n│ n_devices │ Count │\n└──────────────┴────────────┘\n\nSee also ContinuousEncoder.\n\n\n\n\n\n","category":"type"},{"location":"transformers/#MLJModels.ContinuousEncoder","page":"Transformers and 
Other Unsupervised models","title":"MLJModels.ContinuousEncoder","text":"ContinuousEncoder\n\nA model type for constructing a continuous encoder, based on MLJModels.jl, and implementing the MLJ model interface.\n\nFrom MLJ, the type can be imported using\n\nContinuousEncoder = @load ContinuousEncoder pkg=MLJModels\n\nDo model = ContinuousEncoder() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ContinuousEncoder(drop_last=...).\n\nUse this model to arrange all features (columns) of a table to have Continuous element scitype, by applying the following protocol to each feature ftr:\n\nIf ftr is already Continuous retain it.\nIf ftr is Multiclass, one-hot encode it.\nIf ftr is OrderedFactor, replace it with coerce(ftr, Continuous) (vector of floating point integers), unless ordered_factors=false is specified, in which case one-hot encode it.\nIf ftr is Count, replace it with coerce(ftr, Continuous).\nIf ftr has some other element scitype, or was not observed in fitting the encoder, drop it from the table.\n\nWarning: This transformer assumes that levels(col) for any Multiclass or OrderedFactor column, col, is the same for training data and new data to be transformed.\n\nTo selectively one-hot-encode categorical features (without dropping columns) use OneHotEncoder instead.\n\nTraining data\n\nIn MLJ or MLJBase, bind an instance model to data with\n\nmach = machine(model, X)\n\nwhere\n\nX: any Tables.jl compatible table. Columns can be of mixed type but only those with element scitype Multiclass or OrderedFactor can be encoded. Check column scitypes with schema(X).\n\nTrain the machine using fit!(mach, rows=...).\n\nHyper-parameters\n\ndrop_last=true: whether to drop the column corresponding to the final class of one-hot encoded features. 
For example, a three-class feature is spawned into three new features if drop_last=false, but two just features otherwise.\none_hot_ordered_factors=false: whether to one-hot any feature with OrderedFactor element scitype, or to instead coerce it directly to a (single) Continuous feature using the order\n\nFitted parameters\n\nThe fields of fitted_params(mach) are:\n\nfeatures_to_keep: names of features that will not be dropped from the table\none_hot_encoder: the OneHotEncoder model instance for handling the one-hot encoding\none_hot_encoder_fitresult: the fitted parameters of the OneHotEncoder model\n\nReport\n\nfeatures_to_keep: names of input features that will not be dropped from the table\nnew_features: names of all output features\n\nExample\n\nX = (name=categorical([\"Danesh\", \"Lee\", \"Mary\", \"John\"]),\n grade=categorical([\"A\", \"B\", \"A\", \"C\"], ordered=true),\n height=[1.85, 1.67, 1.5, 1.67],\n n_devices=[3, 2, 4, 3],\n comments=[\"the force\", \"be\", \"with you\", \"too\"])\n\njulia> schema(X)\n┌───────────┬──────────────────┐\n│ names │ scitypes │\n├───────────┼──────────────────┤\n│ name │ Multiclass{4} │\n│ grade │ OrderedFactor{3} │\n│ height │ Continuous │\n│ n_devices │ Count │\n│ comments │ Textual │\n└───────────┴──────────────────┘\n\nencoder = ContinuousEncoder(drop_last=true)\nmach = fit!(machine(encoder, X))\nW = transform(mach, X)\n\njulia> schema(W)\n┌──────────────┬────────────┐\n│ names │ scitypes │\n├──────────────┼────────────┤\n│ name__Danesh │ Continuous │\n│ name__John │ Continuous │\n│ name__Lee │ Continuous │\n│ grade │ Continuous │\n│ height │ Continuous │\n│ n_devices │ Continuous │\n└──────────────┴────────────┘\n\njulia> setdiff(schema(X).names, report(mach).features_to_keep) # dropped features\n1-element Vector{Symbol}:\n :comments\n\n\nSee also OneHotEncoder\n\n\n\n\n\n","category":"type"},{"location":"transformers/#MLJModels.FillImputer","page":"Transformers and Other Unsupervised models","title":"MLJModels.FillImputer","text":"FillImputer\n\nA model type for constructing a fill imputer, based on MLJModels.jl, and implementing the MLJ model interface.\n\nFrom MLJ, the type can be imported using\n\nFillImputer = @load FillImputer pkg=MLJModels\n\nDo model = FillImputer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in FillImputer(features=...).\n\nUse this model to impute missing values in tabular data. A fixed \"filler\" value is learned from the training data, one for each column of the table.\n\nFor imputing missing values in a vector, use UnivariateFillImputer instead.\n\nTraining data\n\nIn MLJ or MLJBase, bind an instance model to data with\n\nmach = machine(model, X)\n\nwhere\n\nX: any table of input features (eg, a DataFrame) whose columns each have element scitypes Union{Missing, T}, where T is a subtype of Continuous, Multiclass, OrderedFactor or Count. 
Check scitypes with schema(X).\n\nTrain the machine using fit!(mach, rows=...).\n\nHyper-parameters\n\nfeatures: a vector of names of features (symbols) for which imputation is to be attempted; default is empty, which is interpreted as \"impute all\".\ncontinuous_fill: function or other callable to determine value to be imputed in the case of Continuous (abstract float) data; default is to apply median after skipping missing values\ncount_fill: function or other callable to determine value to be imputed in the case of Count (integer) data; default is to apply rounded median after skipping missing values\nfinite_fill: function or other callable to determine value to be imputed in the case of Multiclass or OrderedFactor data (categorical vectors); default is to apply mode after skipping missing values\n\nOperations\n\ntransform(mach, Xnew): return Xnew with missing values imputed with the fill values learned when fitting mach\n\nFitted parameters\n\nThe fields of fitted_params(mach) are:\n\nfeatures_seen_in_fit: the names of features (columns) encountered during training\nunivariate_transformer: the univariate model applied to determine the fillers (it's fields contain the functions defining the filler computations)\nfiller_given_feature: dictionary of filler values, keyed on feature (column) names\n\nExamples\n\nusing MLJ\nimputer = FillImputer()\n\nX = (a = [1.0, 2.0, missing, 3.0, missing],\n b = coerce([\"y\", \"n\", \"y\", missing, \"y\"], Multiclass),\n c = [1, 1, 2, missing, 3])\n\nschema(X)\njulia> schema(X)\n┌───────┬───────────────────────────────┐\n│ names │ scitypes │\n├───────┼───────────────────────────────┤\n│ a │ Union{Missing, Continuous} │\n│ b │ Union{Missing, Multiclass{2}} │\n│ c │ Union{Missing, Count} │\n└───────┴───────────────────────────────┘\n\nmach = machine(imputer, X)\nfit!(mach)\n\njulia> fitted_params(mach).filler_given_feature\n(filler = 2.0,)\n\njulia> fitted_params(mach).filler_given_feature\nDict{Symbol, Any} with 3 entries:\n :a => 2.0\n :b => \"y\"\n :c => 2\n\njulia> transform(mach, X)\n(a = [1.0, 2.0, 2.0, 3.0, 2.0],\n b = CategoricalValue{String, UInt32}[\"y\", \"n\", \"y\", \"y\", \"y\"],\n c = [1, 1, 2, 2, 3],)\n\nSee also UnivariateFillImputer.\n\n\n\n\n\n","category":"type"},{"location":"transformers/#MLJModels.UnivariateFillImputer","page":"Transformers and Other Unsupervised models","title":"MLJModels.UnivariateFillImputer","text":"UnivariateFillImputer\n\nA model type for constructing a single variable fill imputer, based on MLJModels.jl, and implementing the MLJ model interface.\n\nFrom MLJ, the type can be imported using\n\nUnivariateFillImputer = @load UnivariateFillImputer pkg=MLJModels\n\nDo model = UnivariateFillImputer() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in UnivariateFillImputer(continuous_fill=...).\n\nUse this model to imputing missing values in a vector with a fixed value learned from the non-missing values of training vector.\n\nFor imputing missing values in tabular data, use FillImputer instead.\n\nTraining data\n\nIn MLJ or MLJBase, bind an instance model to data with\n\nmach = machine(model, x)\n\nwhere\n\nx: any abstract vector with element scitype Union{Missing, T} where T is a subtype of Continuous, Multiclass, OrderedFactor or Count; check scitype using scitype(x)\n\nTrain the machine using fit!(mach, rows=...).\n\nHyper-parameters\n\ncontinuous_fill: function or other callable to determine value to be imputed in the case of Continuous (abstract float) data; default is to apply median after skipping missing values\ncount_fill: function or other callable to determine value to be imputed in the case of Count (integer) data; default is to apply rounded median after skipping missing values\nfinite_fill: function or other callable to determine value to be imputed in the case of Multiclass or OrderedFactor data (categorical vectors); default is to apply mode after skipping missing values\n\nOperations\n\ntransform(mach, xnew): return xnew with missing values imputed with the fill values learned when fitting mach\n\nFitted parameters\n\nThe fields of fitted_params(mach) are:\n\nfiller: the fill value to be imputed in all new data\n\nExamples\n\nusing MLJ\nimputer = UnivariateFillImputer()\n\nx_continuous = [1.0, 2.0, missing, 3.0]\nx_multiclass = coerce([\"y\", \"n\", \"y\", missing, \"y\"], Multiclass)\nx_count = [1, 1, 1, 2, missing, 3, 3]\n\nmach = machine(imputer, x_continuous)\nfit!(mach)\n\njulia> fitted_params(mach)\n(filler = 2.0,)\n\njulia> transform(mach, [missing, missing, 101.0])\n3-element Vector{Float64}:\n 2.0\n 2.0\n 101.0\n\nmach2 = machine(imputer, x_multiclass) |> fit!\n\njulia> transform(mach2, x_multiclass)\n5-element CategoricalArray{String,1,UInt32}:\n \"y\"\n \"n\"\n \"y\"\n \"y\"\n \"y\"\n\nmach3 = machine(imputer, x_count) |> fit!\n\njulia> transform(mach3, [missing, missing, 5])\n3-element Vector{Int64}:\n 2\n 2\n 5\n\nFor imputing tabular data, use FillImputer.\n\n\n\n\n\n","category":"type"},{"location":"transformers/#MLJModels.FeatureSelector","page":"Transformers and Other Unsupervised models","title":"MLJModels.FeatureSelector","text":"FeatureSelector\n\nA model type for constructing a feature selector, based on MLJModels.jl, and implementing the MLJ model interface.\n\nFrom MLJ, the type can be imported using\n\nFeatureSelector = @load FeatureSelector pkg=MLJModels\n\nDo model = FeatureSelector() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in FeatureSelector(features=...).\n\nUse this model to select features (columns) of a table, usually as part of a model Pipeline.\n\nTraining data\n\nIn MLJ or MLJBase, bind an instance model to data with\n\nmach = machine(model, X)\n\nwhere\n\nX: any table of input features, where \"table\" is in the sense of Tables.jl\n\nTrain the machine using fit!(mach, rows=...).\n\nHyper-parameters\n\nfeatures: one of the following, with the behavior indicated:\n[] (empty, the default): filter out all features (columns) which were not encountered in training\nnon-empty vector of feature names (symbols): keep only the specified features (ignore=false) or keep only unspecified features (ignore=true)\nfunction or other callable: keep a feature if the callable returns true on its name. For example, specifying FeatureSelector(features = name -> name in [:x1, :x3], ignore = true) has the same effect as FeatureSelector(features = [:x1, :x3], ignore = true), namely to select all features, with the exception of :x1 and :x3.\nignore: whether to ignore or keep specified features, as explained above\n\nOperations\n\ntransform(mach, Xnew): select features from the table Xnew as specified by the model, taking features seen during training into account, if relevant\n\nFitted parameters\n\nThe fields of fitted_params(mach) are:\n\nfeatures_to_keep: the features that will be selected\n\nExample\n\nusing MLJ\n\nX = (ordinal1 = [1, 2, 3],\n ordinal2 = coerce([\"x\", \"y\", \"x\"], OrderedFactor),\n ordinal3 = [10.0, 20.0, 30.0],\n ordinal4 = [-20.0, -30.0, -40.0],\n nominal = coerce([\"Your father\", \"he\", \"is\"], Multiclass));\n\nselector = FeatureSelector(features=[:ordinal3, ], ignore=true);\n\njulia> transform(fit!(machine(selector, X)), X)\n(ordinal1 = [1, 2, 3],\n ordinal2 = CategoricalValue{Symbol,UInt32}[\"x\", \"y\", \"x\"],\n ordinal4 = [-20.0, -30.0, -40.0],\n nominal = CategoricalValue{String,UInt32}[\"Your father\", \"he\", \"is\"],)\n\n\n\n\n\n\n","category":"type"},{"location":"transformers/#MLJModels.UnivariateBoxCoxTransformer","page":"Transformers and Other Unsupervised models","title":"MLJModels.UnivariateBoxCoxTransformer","text":"UnivariateBoxCoxTransformer\n\nA model type for constructing a single variable Box-Cox transformer, based on MLJModels.jl, and implementing the MLJ model interface.\n\nFrom MLJ, the type can be imported using\n\nUnivariateBoxCoxTransformer = @load UnivariateBoxCoxTransformer pkg=MLJModels\n\nDo model = UnivariateBoxCoxTransformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateBoxCoxTransformer(n=...).\n\nBox-Cox transformations attempt to make data look more normally distributed. This can improve performance and assist in the interpretation of models which suppose that data is generated by a normal distribution.\n\nA Box-Cox transformation (with shift) is of the form\n\nx -> ((x + c)^λ - 1)/λ\n\nfor some constant c and real λ, unless λ = 0, in which case the above is replaced with\n\nx -> log(x + c)\n\nGiven user-specified hyper-parameters n::Integer and shift::Bool, the present implementation learns the parameters c and λ from the training data as follows: If shift=true and zeros are encountered in the data, then c is set to 0.2 times the data mean. If there are no zeros, then no shift is applied. 
Finally, n different values of λ between -0.4 and 3 are considered, with λ fixed to the value maximizing normality of the transformed data.\n\nReference: Wikipedia entry for power transform.\n\nTraining data\n\nIn MLJ or MLJBase, bind an instance model to data with\n\nmach = machine(model, x)\n\nwhere\n\nx: any abstract vector with element scitype Continuous; check the scitype with scitype(x)\n\nTrain the machine using fit!(mach, rows=...).\n\nHyper-parameters\n\nn=171: number of values of the exponent λ to try\nshift=false: whether to include a preliminary constant translation in transformations, in the presence of zeros\n\nOperations\n\ntransform(mach, xnew): apply the Box-Cox transformation learned when fitting mach\ninverse_transform(mach, z): reconstruct the vector z whose transformation learned by mach is z\n\nFitted parameters\n\nThe fields of fitted_params(mach) are:\n\nλ: the learned Box-Cox exponent\nc: the learned shift\n\nExamples\n\nusing MLJ\nusing UnicodePlots\nusing Random\nRandom.seed!(123)\n\ntransf = UnivariateBoxCoxTransformer()\n\nx = randn(1000).^2\n\nmach = machine(transf, x)\nfit!(mach)\n\nz = transform(mach, x)\n\njulia> histogram(x)\n ┌ ┐\n [ 0.0, 2.0) ┤███████████████████████████████████ 848\n [ 2.0, 4.0) ┤████▌ 109\n [ 4.0, 6.0) ┤█▍ 33\n [ 6.0, 8.0) ┤▍ 7\n [ 8.0, 10.0) ┤▏ 2\n [10.0, 12.0) ┤ 0\n [12.0, 14.0) ┤▏ 1\n └ ┘\n Frequency\n\njulia> histogram(z)\n ┌ ┐\n [-5.0, -4.0) ┤█▎ 8\n [-4.0, -3.0) ┤████████▊ 64\n [-3.0, -2.0) ┤█████████████████████▊ 159\n [-2.0, -1.0) ┤█████████████████████████████▊ 216\n [-1.0, 0.0) ┤███████████████████████████████████ 254\n [ 0.0, 1.0) ┤█████████████████████████▊ 188\n [ 1.0, 2.0) ┤████████████▍ 90\n [ 2.0, 3.0) ┤██▊ 20\n [ 3.0, 4.0) ┤▎ 1\n └ ┘\n Frequency\n\n\n\n\n\n\n","category":"type"},{"location":"transformers/#MLJModels.UnivariateDiscretizer","page":"Transformers and Other Unsupervised models","title":"MLJModels.UnivariateDiscretizer","text":"UnivariateDiscretizer\n\nA model type for constructing a single variable discretizer, based on MLJModels.jl, and implementing the MLJ model interface.\n\nFrom MLJ, the type can be imported using\n\nUnivariateDiscretizer = @load UnivariateDiscretizer pkg=MLJModels\n\nDo model = UnivariateDiscretizer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateDiscretizer(n_classes=...).\n\nDiscretization converts a Continuous vector into an OrderedFactor vector. In particular, the output is a CategoricalVector (whose reference type is optimized).\n\nThe transformation is chosen so that the vector on which the transformer is fit has, in transformed form, an approximately uniform distribution of values. 
Specifically, if n_classes is the level of discretization, then 2*n_classes - 1 ordered quantiles are computed, the odd quantiles being used for transforming (discretization) and the even quantiles for inverse transforming.\n\nTraining data\n\nIn MLJ or MLJBase, bind an instance model to data with\n\nmach = machine(model, x)\n\nwhere\n\nx: any abstract vector with Continuous element scitype; check scitype with scitype(x).\n\nTrain the machine using fit!(mach, rows=...).\n\nHyper-parameters\n\nn_classes: number of discrete classes in the output\n\nOperations\n\ntransform(mach, xnew): discretize xnew according to the discretization learned when fitting mach\ninverse_transform(mach, z): attempt to reconstruct from z a vector that transforms to give z\n\nFitted parameters\n\nThe fields of fitted_params(mach).fitesult include:\n\nodd_quantiles: quantiles used for transforming (length is n_classes - 1)\neven_quantiles: quantiles used for inverse transforming (length is n_classes)\n\nExample\n\nusing MLJ\nusing Random\nRandom.seed!(123)\n\ndiscretizer = UnivariateDiscretizer(n_classes=100)\nmach = machine(discretizer, randn(1000))\nfit!(mach)\n\njulia> x = rand(5)\n5-element Vector{Float64}:\n 0.8585244609846809\n 0.37541692370451396\n 0.6767070590395461\n 0.9208844241267105\n 0.7064611415680901\n\njulia> z = transform(mach, x)\n5-element CategoricalArrays.CategoricalArray{UInt8,1,UInt8}:\n 0x52\n 0x42\n 0x4d\n 0x54\n 0x4e\n\nx_approx = inverse_transform(mach, z)\njulia> x - x_approx\n5-element Vector{Float64}:\n 0.008224506144777322\n 0.012731354778359405\n 0.0056265330571125816\n 0.005738175684445124\n 0.006835652575801987\n\n\n\n\n\n","category":"type"},{"location":"transformers/#MLJModels.UnivariateTimeTypeToContinuous","page":"Transformers and Other Unsupervised models","title":"MLJModels.UnivariateTimeTypeToContinuous","text":"UnivariateTimeTypeToContinuous\n\nA model type for constructing a single variable transformer that creates continuous representations of temporally typed data, based on MLJModels.jl, and implementing the MLJ model interface.\n\nFrom MLJ, the type can be imported using\n\nUnivariateTimeTypeToContinuous = @load UnivariateTimeTypeToContinuous pkg=MLJModels\n\nDo model = UnivariateTimeTypeToContinuous() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateTimeTypeToContinuous(zero_time=...).\n\nUse this model to convert vectors with a TimeType element type to vectors of Float64 type (Continuous element scitype).\n\nTraining data\n\nIn MLJ or MLJBase, bind an instance model to data with\n\nmach = machine(model, x)\n\nwhere\n\nx: any abstract vector whose element type is a subtype of Dates.TimeType\n\nTrain the machine using fit!(mach, rows=...).\n\nHyper-parameters\n\nzero_time: the time that is to correspond to 0.0 under transformations, with the type coinciding with the training data element type. 
If unspecified, the earliest time encountered in training is used.\nstep::Period=Hour(24): time interval to correspond to one unit under transformation\n\nOperations\n\ntransform(mach, xnew): apply the encoding inferred when mach was fit\n\nFitted parameters\n\nfitted_params(mach).fitresult is the tuple (zero_time, step) actually used in transformations, which may differ from the user-specified hyper-parameters.\n\nExample\n\nusing MLJ\nusing Dates\n\nx = [Date(2001, 1, 1) + Day(i) for i in 0:4]\n\nencoder = UnivariateTimeTypeToContinuous(zero_time=Date(2000, 1, 1),\n step=Week(1))\n\nmach = machine(encoder, x)\nfit!(mach)\njulia> transform(mach, x)\n5-element Vector{Float64}:\n 52.285714285714285\n 52.42857142857143\n 52.57142857142857\n 52.714285714285715\n 52.857142\n\n\n\n\n\n","category":"type"},{"location":"transformers/#Static-transformers","page":"Transformers and Other Unsupervised models","title":"Static transformers","text":"","category":"section"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"A static transformer is a model for transforming data that does not generalize to new data (does not \"learn\") but which nevertheless has hyperparameters. For example, the DBSCAN clustering model from Clustering.jl can assign labels to some collection of observations, but cannot directly assign a label to some new observation.","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"The general user may define their own static models. The main use-case is the insertion into Linear Pipelines of some parameter-dependent transformation. (If a static transformer has no hyper-parameters, it is tantamount to an ordinary function. An ordinary function can be inserted directly into a pipeline; the situation for learning networks is only slightly more complicated.)","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"The following example defines a new model type Averager to perform the weighted average of two vectors (target predictions, for example). We suppose the weighting is normalized, and therefore controlled by a single hyper-parameter, mix.","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"using MLJ","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"mutable struct Averager <: Static\n mix::Float64\nend\n\nMLJ.transform(a::Averager, _, y1, y2) = (1 - a.mix)*y1 + a.mix*y2","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"Important. Note the sub-typing <: Static.","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"Such static transformers with (unlearned) parameters can have arbitrarily many inputs, but only one output. In the single input case, an inverse_transform can also be defined. Since they have no real learned parameters, you bind a static transformer to a machine without specifying training arguments; there is no need to fit! 
the machine:","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"mach = machine(Averager(0.5))\ntransform(mach, [1, 2, 3], [3, 2, 1])","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"Let's see how we can include our Averager in a learning network to mix the predictions of two regressors, with one-hot encoding of the inputs. Here's two regressors for mixing, and some dummy data for testing our learning network:","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"ridge = (@load RidgeRegressor pkg=MultivariateStats)()\nknn = (@load KNNRegressor)()\n\nimport Random.seed!\nseed!(112)\nX = (\n x1=coerce(rand(\"ab\", 100), Multiclass),\n x2=rand(100),\n)\ny = X.x2 + 0.05*rand(100)\nschema(X)","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"And the learning network:","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"Xs = source(X)\nys = source(y)\n\naverager = Averager(0.5)\n\nmach0 = machine(OneHotEncoder(), Xs)\nW = transform(mach0, Xs) # one-hot encode the input\n\nmach1 = machine(ridge, W, ys)\ny1 = predict(mach1, W)\n\nmach2 = machine(knn, W, ys)\ny2 = predict(mach2, W)\n\nmach4= machine(averager)\nyhat = transform(mach4, y1, y2)\n\n# test:\nfit!(yhat)\nXnew = selectrows(X, 1:3)\nyhat(Xnew)","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"We next \"export\" the learning network as a standalone composite model type. First we need a struct for the composite model. 
Since we are restricting to Deterministic component regressors, the composite will also make deterministic predictions, and so gets the supertype DeterministicNetworkComposite:","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"mutable struct DoubleRegressor <: DeterministicNetworkComposite\n regressor1\n regressor2\n averager\nend","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"As described in Learning Networks, we next paste the learning network into a prefit declaration, replace the component models with symbolic placeholders, and add a learning network \"interface\":","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"import MLJBase\nfunction MLJBase.prefit(composite::DoubleRegressor, verbosity, X, y)\n Xs = source(X)\n ys = source(y)\n\n mach0 = machine(OneHotEncoder(), Xs)\n W = transform(mach0, Xs) # one-hot encode the input\n\n mach1 = machine(:regressor1, W, ys)\n y1 = predict(mach1, W)\n\n mach2 = machine(:regressor2, W, ys)\n y2 = predict(mach2, W)\n\n mach4= machine(:averager)\n yhat = transform(mach4, y1, y2)\n\n # learning network interface:\n (; predict=yhat)\nend","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"The new model type can be evaluated like any other supervised model:","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"X, y = @load_reduced_ames;\ncomposite = DoubleRegressor(ridge, knn, Averager(0.5))","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"composite.averager.mix = 0.25 # adjust mix from default of 0.5\nevaluate(composite, X, y, measure=l1)","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"A static transformer can also expose byproducts of the transform computation in the report of any associated machine. See Static models (models that do not generalize) for details.","category":"page"},{"location":"transformers/#Transformers-that-also-predict","page":"Transformers and Other Unsupervised models","title":"Transformers that also predict","text":"","category":"section"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"Some clustering algorithms learn to label data by identifying a collection of \"centroids\" in the training data. Any new input observation is labeled with the cluster to which it is closest (this is the output of predict) while the vector of all distances from the centroids defines a lower-dimensional representation of the observation (the output of transform). 
In the following example a K-means clustering algorithm assigns one of three labels 1, 2, 3 to the input features of the iris data set and compares them with the actual species recorded in the target (not seen by the algorithm).","category":"page"},{"location":"transformers/","page":"Transformers and Other Unsupervised models","title":"Transformers and Other Unsupervised models","text":"import Random.seed!\nseed!(123)\n\nX, y = @load_iris;\nKMeans = @load KMeans pkg=ParallelKMeans\nkmeans = KMeans()\nmach = machine(kmeans, X) |> fit!\n\n# transforming:\nXsmall = transform(mach);\nselectrows(Xsmall, 1:4) |> pretty\njulia> selectrows(Xsmall, 1:4) |> pretty\n┌─────────────────────┬────────────────────┬────────────────────┐\n│ x1 │ x2 │ x3 │\n│ Float64 │ Float64 │ Float64 │\n│ Continuous │ Continuous │ Continuous │\n├─────────────────────┼────────────────────┼────────────────────┤\n│ 0.0215920000000267 │ 25.314260355029603 │ 11.645232464391299 │\n│ 0.19199200000001326 │ 25.882721893491123 │ 11.489658693899486 │\n│ 0.1699920000000077 │ 27.58656804733728 │ 12.674412792260142 │\n│ 0.26919199999998966 │ 26.28656804733727 │ 11.64392098898145 │\n└─────────────────────┴────────────────────┴────────────────────┘\n\n# predicting:\nyhat = predict(mach);\ncompare = zip(yhat, y) |> collect;\ncompare[1:8]\n8-element Array{Tuple{CategoricalValue{Int64,UInt32},CategoricalString{UInt32}},1}:\n (1, \"setosa\")\n (1, \"setosa\")\n (1, \"setosa\")\n (1, \"setosa\")\n (1, \"setosa\")\n (1, \"setosa\")\n (1, \"setosa\")\n (1, \"setosa\")\n\ncompare[51:58]\n8-element Array{Tuple{CategoricalValue{Int64,UInt32},CategoricalString{UInt32}},1}:\n (2, \"versicolor\")\n (3, \"versicolor\")\n (2, \"versicolor\")\n (3, \"versicolor\")\n (3, \"versicolor\")\n (3, \"versicolor\")\n (3, \"versicolor\")\n (3, \"versicolor\")\n\ncompare[101:108]\n8-element Array{Tuple{CategoricalValue{Int64,UInt32},CategoricalString{UInt32}},1}:\n (2, \"virginica\")\n (3, \"virginica\")\n (2, \"virginica\")\n (2, \"virginica\")\n (2, \"virginica\")\n (2, \"virginica\")\n (3, \"virginica\")\n (2, \"virginica\")","category":"page"},{"location":"models/GaussianProcessRegressor_MLJScikitLearnInterface/#GaussianProcessRegressor_MLJScikitLearnInterface","page":"GaussianProcessRegressor","title":"GaussianProcessRegressor","text":"","category":"section"},{"location":"models/GaussianProcessRegressor_MLJScikitLearnInterface/","page":"GaussianProcessRegressor","title":"GaussianProcessRegressor","text":"GaussianProcessRegressor","category":"page"},{"location":"models/GaussianProcessRegressor_MLJScikitLearnInterface/","page":"GaussianProcessRegressor","title":"GaussianProcessRegressor","text":"A model type for constructing a Gaussian process regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/GaussianProcessRegressor_MLJScikitLearnInterface/","page":"GaussianProcessRegressor","title":"GaussianProcessRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/GaussianProcessRegressor_MLJScikitLearnInterface/","page":"GaussianProcessRegressor","title":"GaussianProcessRegressor","text":"GaussianProcessRegressor = @load GaussianProcessRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/GaussianProcessRegressor_MLJScikitLearnInterface/","page":"GaussianProcessRegressor","title":"GaussianProcessRegressor","text":"Do model = GaussianProcessRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in GaussianProcessRegressor(kernel=...).","category":"page"},{"location":"models/GaussianProcessRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"GaussianProcessRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/GaussianProcessRegressor_MLJScikitLearnInterface/","page":"GaussianProcessRegressor","title":"GaussianProcessRegressor","text":"kernel = nothing\nalpha = 1.0e-10\noptimizer = fmin_l_bfgs_b\nn_restarts_optimizer = 0\nnormalize_y = false\ncopy_X_train = true\nrandom_state = nothing","category":"page"},{"location":"models/MeanShift_MLJScikitLearnInterface/#MeanShift_MLJScikitLearnInterface","page":"MeanShift","title":"MeanShift","text":"","category":"section"},{"location":"models/MeanShift_MLJScikitLearnInterface/","page":"MeanShift","title":"MeanShift","text":"MeanShift","category":"page"},{"location":"models/MeanShift_MLJScikitLearnInterface/","page":"MeanShift","title":"MeanShift","text":"A model type for constructing a mean shift, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MeanShift_MLJScikitLearnInterface/","page":"MeanShift","title":"MeanShift","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MeanShift_MLJScikitLearnInterface/","page":"MeanShift","title":"MeanShift","text":"MeanShift = @load MeanShift pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/MeanShift_MLJScikitLearnInterface/","page":"MeanShift","title":"MeanShift","text":"Do model = MeanShift() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MeanShift(bandwidth=...).","category":"page"},{"location":"models/MeanShift_MLJScikitLearnInterface/","page":"MeanShift","title":"MeanShift","text":"Mean shift clustering using a flat kernel. Mean shift clustering aims to discover \"blobs\" in a smooth density of samples. It is a centroid-based algorithm, which works by updating candidates for centroids to be the mean of the points within a given region. These candidates are then filtered in a post-processing stage to eliminate near-duplicates to form the final set of centroids.\"","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/#StableRulesRegressor_SIRUS","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"","category":"section"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"StableRulesRegressor","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"A model type for constructing a stable rules regressor, based on SIRUS.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"StableRulesRegressor = @load StableRulesRegressor pkg=SIRUS","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"Do model = StableRulesRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in StableRulesRegressor(rng=...).","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"StableRulesRegressor implements the explainable rule-based regression model based on a random forest.","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/#Training-data","page":"StableRulesRegressor","title":"Training data","text":"","category":"section"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"where","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/#Hyperparameters","page":"StableRulesRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"rng::AbstractRNG=default_rng(): Random number generator. Using a StableRNG from StableRNGs.jl is advised.\npartial_sampling::Float64=0.7: Ratio of samples to use in each subset of the data. The default should be fine for most cases.\nn_trees::Int=1000: The number of trees to use. It is advisable to use at least a thousand trees for better rule selection, and in turn better predictive performance.\nmax_depth::Int=2: The depth of the tree. A lower depth decreases model complexity and can therefore improve accuracy when the sample size is small (reduce overfitting).\nq::Int=10: Number of cutpoints to use per feature. The default value should be fine for most situations.\nmin_data_in_leaf::Int=5: Minimum number of data points per leaf.\nmax_rules::Int=10: This is the most important hyperparameter after lambda. The more rules, the more accurate the model should be. If this is not the case, tune lambda first. However, more rules will also decrease model interpretability. So, it is important to find a good balance here. In most cases, 10 to 40 rules should provide reasonable accuracy while remaining interpretable.\nlambda::Float64=1.0: The weights of the final rules are determined via a regularized regression over each rule as a binary feature. This hyperparameter specifies the strength of the ridge (L2) regularizer. SIRUS is very sensitive to the choice of this hyperparameter. Ensure that you try the full range from 10^-4 to 10^4 (e.g., 0.001, 0.01, ..., 100). When trying the range, one good check is to verify that an increase in max_rules increases performance. 
If this is not the case, then try a different value for lambda.","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/#Fitted-parameters","page":"StableRulesRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"fitresult: A StableRules object.","category":"page"},{"location":"models/StableRulesRegressor_SIRUS/#Operations","page":"StableRulesRegressor","title":"Operations","text":"","category":"section"},{"location":"models/StableRulesRegressor_SIRUS/","page":"StableRulesRegressor","title":"StableRulesRegressor","text":"predict(mach, Xnew): Return a vector of predictions for each row of Xnew.","category":"page"},{"location":"correcting_class_imbalance/#Correcting-Class-Imbalance","page":"Correcting Class Imbalance","title":"Correcting Class Imbalance","text":"","category":"section"},{"location":"correcting_class_imbalance/#Oversampling-and-undersampling-methods","page":"Correcting Class Imbalance","title":"Oversampling and undersampling methods","text":"","category":"section"},{"location":"correcting_class_imbalance/","page":"Correcting Class Imbalance","title":"Correcting Class Imbalance","text":"Models providing oversampling or undersampling methods, to correct for class imbalance, are listed under Class Imbalance. In particular, several popular algorithms are provided by the Imbalance.jl package, which includes detailed documentation and tutorials.","category":"page"},{"location":"correcting_class_imbalance/#Incorporating-class-imbalance-in-supervised-learning-pipelines","page":"Correcting Class Imbalance","title":"Incorporating class imbalance in supervised learning pipelines","text":"","category":"section"},{"location":"correcting_class_imbalance/","page":"Correcting Class Imbalance","title":"Correcting Class Imbalance","text":"One or more oversampling/undersampling algorithms can be fused with an MLJ classifier using the BalancedModel wrapper. This creates a new classifier which can be treated like any other; resampling to correct for class imbalance, relevant only for training of the atomic classifier, is then carried out internally. 
If, for example, one applies cross-validation to the wrapped classifier (using evaluate!, say) then this means over/undersampling is then repeated for each training fold automatically.","category":"page"},{"location":"correcting_class_imbalance/","page":"Correcting Class Imbalance","title":"Correcting Class Imbalance","text":"Refer to the MLJBalancing.jl documentation for further details.","category":"page"},{"location":"correcting_class_imbalance/","page":"Correcting Class Imbalance","title":"Correcting Class Imbalance","text":"MLJBalancing.BalancedModel","category":"page"},{"location":"correcting_class_imbalance/#MLJBalancing.BalancedModel","page":"Correcting Class Imbalance","title":"MLJBalancing.BalancedModel","text":"BalancedModel(; model=nothing, balancer1=balancer_model1, balancer2=balancer_model2, ...)\nBalancedModel(model; balancer1=balancer_model1, balancer2=balancer_model2, ...)\n\nGiven a classification model, and one or more balancer models that all implement the MLJModelInterface, BalancedModel allows constructing a sequential pipeline that wraps an arbitrary number of balancing models and a classifier together in a sequential pipeline.\n\nOperation\n\nDuring training, data is first passed to balancer1 and the result is passed to balancer2 and so on, the result from the final balancer is then passed to the classifier for training.\nDuring prediction, the balancers have no effect.\n\nArguments\n\nmodel::Supervised: A classification model that implements the MLJModelInterface. \nbalancer1::Static=...: The first balancer model to pass the data to. This keyword argument can have any name.\nbalancer2::Static=...: The second balancer model to pass the data to. This keyword argument can have any name.\nand so on for an arbitrary number of balancers.\n\nReturns\n\nAn instance of type ProbabilisticBalancedModel or DeterministicBalancedModel, depending on the prediction type of model.\n\nExample\n\nusing MLJ\nusing Imbalance\n\n# generate data\nX, y = Imbalance.generate_imbalanced_data(1000, 5; class_probs=[0.2, 0.3, 0.5])\n\n# prepare classification and balancing models\nSMOTENC = @load SMOTENC pkg=Imbalance verbosity=0\nTomekUndersampler = @load TomekUndersampler pkg=Imbalance verbosity=0\nLogisticClassifier = @load LogisticClassifier pkg=MLJLinearModels verbosity=0\n\noversampler = SMOTENC(k=5, ratios=1.0, rng=42)\nundersampler = TomekUndersampler(min_ratios=0.5, rng=42)\nlogistic_model = LogisticClassifier()\n\n# wrap them in a BalancedModel\nbalanced_model = BalancedModel(model=logistic_model, balancer1=oversampler, balancer2=undersampler)\n\n# now this behaves as a unified model that can be trained, validated, fine-tuned, etc.\nmach = machine(balanced_model, X, y)\nfit!(mach)\n\n\n\n\n\n","category":"function"},{"location":"working_with_categorical_data/#Working-with-Categorical-Data","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"","category":"section"},{"location":"working_with_categorical_data/#Scientific-types-for-discrete-data","page":"Working with Categorical Data","title":"Scientific types for discrete data","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Recall that models articulate their data requirements using scientific types (see Getting Started or the ScientificTypes.jl documentation). 
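For example, a model's requirements can be queried directly, as in the following minimal sketch (it assumes the DecisionTree.jl interface package is installed; the scitypes returned depend on the model):\n\nusing MLJ\nTree = @load DecisionTreeClassifier pkg=DecisionTree verbosity=0\ntree = Tree()\ninput_scitype(tree)   # the scitype required of the input features X\ntarget_scitype(tree)  # the scitype required of the target y\n\n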
There are three scientific types discrete data can have: Count, OrderedFactor and Multiclass.","category":"page"},{"location":"working_with_categorical_data/#Count-data","page":"Working with Categorical Data","title":"Count data","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"In MLJ you cannot use integers to represent (finite) categorical data. Integers are reserved for discrete data you want interpreted as Count <: Infinite:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"using MLJ # hide\nscitype([1, 4, 5, 6])","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"The Count scientific type includes things like the number of phone calls, or city populations, and other \"frequency\" data of a generally unbounded nature.","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"That said, you may have data that is theoretically Count, but which you coerce to OrderedFactor to enable the use of more models, trusting to your knowledge of how those models work to inform an appropriate interpretation.","category":"page"},{"location":"working_with_categorical_data/#OrderedFactor-and-Multiclass-data","page":"Working with Categorical Data","title":"OrderedFactor and Multiclass data","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Other integer data, such as the number of an animal's legs, or number of rooms in homes, are, generally, coerced to OrderedFactor <: Finite. The other categorical scientific type is Multiclass <: Finite, which is for unordered categorical data. Coercing data to one of these two forms is discussed under Detecting and coercing improperly represented categorical data below.","category":"page"},{"location":"working_with_categorical_data/#Binary-data","page":"Working with Categorical Data","title":"Binary data","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"There is no separate scientific type for binary data. Binary data is OrderedFactor{2} if ordered, and Multiclass{2} otherwise. Data with type OrderedFactor{2} is considered to have an intrinsic \"positive\" class, e.g., the outcome of a medical test or the \"pass/fail\" outcome of an exam. MLJ measures, such as true_positive, assume the second class in the ordering is the \"positive\" class. 
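The following small sketch (not from the manual) illustrates that convention, using the true_positive measure, which counts correctly predicted observations of the positive class:\n\nusing MLJ\ny = coerce([\"no\", \"yes\", \"yes\", \"no\"], OrderedFactor)  # levels are [\"no\", \"yes\"], so \"yes\" is positive\nŷ = y[[1, 2, 1, 1]]  # predictions \"no\", \"yes\", \"no\", \"no\", sharing the pool of y\ntrue_positive(ŷ, y)  # 1\n\n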
Inspecting and changing order are discussed in the next section.","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"If data has type Bool it is considered Count data (as Bool <: Integer) and, generally, users will want to coerce such data to Multiclass or OrderedFactor.","category":"page"},{"location":"working_with_categorical_data/#Detecting-and-coercing-improperly-represented-categorical-data","page":"Working with Categorical Data","title":"Detecting and coercing improperly represented categorical data","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"One inspects the scientific type of data using scitype as shown above. To inspect all column scientific types in a table simultaneously, use schema. (The scitype(X) of a table X contains a condensed form of this information used in type dispatch; see here.)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"import DataFrames.DataFrame\nX = DataFrame(\n name = [\"Siri\", \"Robo\", \"Alexa\", \"Cortana\"],\n gender = [\"male\", \"male\", \"Female\", \"female\"],\n likes_soup = [true, false, false, true],\n height = [152, missing, 148, 163],\n rating = [2, 5, 2, 1],\n outcome = [\"rejected\", \"accepted\", \"accepted\", \"rejected\"])\nschema(X)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Coercing a single column:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"X.outcome = coerce(X.outcome, OrderedFactor)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"The machine type of the result is a CategoricalArray. For more on this type see Under the hood: CategoricalValue and CategoricalArray below.","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Inspecting the order of the levels:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"levels(X.outcome)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Since we wish to regard \"accepted\" as the positive class, it should appear second, which we correct with the levels! function:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"levels!(X.outcome, [\"rejected\", \"accepted\"])\nlevels(X.outcome)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"warning: Changing levels of categorical data\nThe order of levels should generally be changed early in your data science workflow and then not again. Similar remarks apply to adding levels (which is possible; see the CategoricalArrays.jl documentation). 
MLJ supervised and unsupervised models assume levels and their order do not change.","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Coercing all remaining types simultaneously:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Xnew = coerce(X, :gender => Multiclass,\n :likes_soup => OrderedFactor,\n :height => Continuous,\n :rating => OrderedFactor)\nschema(Xnew)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"For DataFrames there is also in-place coercion, using coerce!.","category":"page"},{"location":"working_with_categorical_data/#Tracking-all-levels","page":"Working with Categorical Data","title":"Tracking all levels","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"The key property of vectors of scientific type OrderedFactor and Multiclass is that the pool of all levels is not lost when separating out one or more elements:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"v = Xnew.rating","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"levels(v)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"levels(v[1:2])","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"levels(v[2])","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"By tracking all classes in this way, MLJ avoids common pain points around categorical data, such as evaluating models on an evaluation set, only to crash your code because classes appear there which were not seen during training.","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"By drawing test, validation and training data from a common data structure (as described in Getting Started, for example) one ensures that all possible classes of categorical variables are tracked at all times. 
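For instance (a minimal sketch):\n\nusing MLJ\nv = coerce([\"a\", \"b\", \"c\", \"a\", \"b\"], Multiclass)\ntrain, test = partition(eachindex(v), 0.6)\nlevels(v[test])  # [\"a\", \"b\", \"c\"] - all classes are still tracked, even those absent from the test rows\n\n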
However, this does not mitigate problems with new production data, if categorical features there are missing classes or contain previously unseen classes.","category":"page"},{"location":"working_with_categorical_data/#New-or-missing-levels-in-production-data","page":"Working with Categorical Data","title":"New or missing levels in production data","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"warning: Warning\nUnpredictable behavior may result whenever Finite categorical data presents in a production set with different classes (levels) from those presented during training","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Consider, for example, the following naive workflow:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"# train a one-hot encoder on some data:\nx = coerce([\"black\", \"white\", \"white\", \"black\"], Multiclass)\nX = DataFrame(x=x)\n\nmodel = OneHotEncoder()\nmach = machine(model, X) |> fit!\n\n# one-hot encode new data with missing classes:\nxproduction = coerce([\"white\", \"white\"], Multiclass)\nXproduction = DataFrame(x=xproduction)\nXproduction == X[2:3,:]","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"So far, so good. But the following operation throws an error:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"julia> transform(mach, Xproduction) == transform(mach, X[2:3,:])\nERROR: Found category level mismatch in feature `x`. 
Consider using `levels!` to ensure fitted and transforming features have the same category levels.","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"The problem here is that levels(X.x) and levels(Xproduction.x) are different:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"levels(X.x)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"levels(Xproduction.x)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"This could be anticipated by the fact that the training and production data have different schema:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"schema(X)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"schema(Xproduction)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"One fix is to manually correct the levels of the production data:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"levels!(Xproduction.x, levels(x))\ntransform(mach, Xproduction) == transform(mach, X[2:3,:])","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Another solution is to pack all production data with dummy rows based on the training data (subsequently dropped) to ensure there are no missing classes. Currently, MLJ contains no general tooling to check and fix categorical levels in production data (although one can check that training data and production data have the same schema, to ensure the number of classes in categorical data is consistent).","category":"page"},{"location":"working_with_categorical_data/#Extracting-an-integer-representation-of-Finite-data","page":"Working with Categorical Data","title":"Extracting an integer representation of Finite data","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Occasionally, you may really want an integer representation of data that currently has scitype Finite. For example, you are a developer wrapping an algorithm from an external package for use in MLJ, and that algorithm uses integer representations. 
Use the int method for this purpose, and use decoder to construct decoders for reversing the transformation:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"v = coerce([\"one\", \"two\", \"three\", \"one\"], OrderedFactor);\nlevels!(v, [\"one\", \"two\", \"three\"]);\nv_int = int(v)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"d = decoder(v); # or decoder(v[1])\nd.(v_int)","category":"page"},{"location":"working_with_categorical_data/#Under-the-hood:-CategoricalValue-and-CategoricalArray","page":"Working with Categorical Data","title":"Under the hood: CategoricalValue and CategoricalArray","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"In MLJ the objects with OrderedFactor or Multiclass scientific type have machine type CategoricalValue, from the CategoricalArrays.jl package. In some sense CategoricalValues are an implementation detail users can ignore for the most part, as shown above. However, you may want some basic understanding of these types, and those implementing MLJ's model interface for new algorithms will have to understand them. For the complete API, see the CategoricalArrays.jl documentation. Here are the basics:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"To construct an OrderedFactor or Multiclass vector directly from raw labels, one uses categorical:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"using CategoricalArrays # hide\nv = categorical(['A', 'B', 'A', 'A', 'C'])\ntypeof(v)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"(Equivalent to the more idiomatic MLJ v = coerce(['A', 'B', 'A', 'A', 'C'], Multiclass).)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"scitype(v)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"v = categorical(['A', 'B', 'A', 'A', 'C'], ordered=true, compress=true)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"scitype(v)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"When you index a CategoricalVector you don't get a raw label, but instead an instance of CategoricalValue. As explained above, this value knows the complete pool of levels from the vector from which it came. 
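For example, continuing with the vector v just defined (a small sketch):\n\nval = v[1]   # a CategoricalValue, not the raw label 'A'\nlevels(val)  # ['A', 'B', 'C'] - the value carries the full pool\n\n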
Use get(val) to extract the raw label from a value val.","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Despite the distinction that exists between a value (element) and a label, the two are the same, from the point of == and in:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"v[1] == 'A' # true\n'A' in v # true","category":"page"},{"location":"working_with_categorical_data/#Probabilistic-predictions-of-categorical-data","page":"Working with Categorical Data","title":"Probabilistic predictions of categorical data","text":"","category":"section"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Recall from Getting Started that probabilistic classifiers ordinarily predict UnivariateFinite distributions, not raw probabilities (which are instead accessed using the pdf method.) Here's how to construct such a distribution yourself:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"v = coerce([\"yes\", \"no\", \"yes\", \"yes\", \"maybe\"], Multiclass)\nd = UnivariateFinite([v[2], v[1]], [0.9, 0.1])","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Or, equivalently,","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"d = UnivariateFinite([\"no\", \"yes\"], [0.9, 0.1], pool=v)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"This distribution tracks all levels, not just the ones to which you have assigned probabilities:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"pdf(d, \"maybe\")","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"However, pdf(d, \"dunno\") will throw an error.","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"You can declare pool=missing, but then \"maybe\" will not be tracked:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"d = UnivariateFinite([\"no\", \"yes\"], [0.9, 0.1], pool=missing)\nlevels(d)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"To construct a whole vector of UnivariateFinite distributions, simply give the constructor a matrix of probabilities:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"yes_probs = rand(5)\nprobs = hcat(1 .- yes_probs, yes_probs)\nd_vec = UnivariateFinite([\"no\", \"yes\"], probs, pool=v)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"Or, 
equivalently:","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"d_vec = UnivariateFinite([\"no\", \"yes\"], yes_probs, augment=true, pool=v)","category":"page"},{"location":"working_with_categorical_data/","page":"Working with Categorical Data","title":"Working with Categorical Data","text":"For more options, see UnivariateFinite.","category":"page"},{"location":"models/COPODDetector_OutlierDetectionPython/#COPODDetector_OutlierDetectionPython","page":"COPODDetector","title":"COPODDetector","text":"","category":"section"},{"location":"models/COPODDetector_OutlierDetectionPython/","page":"COPODDetector","title":"COPODDetector","text":"COPODDetector(n_jobs = 1)","category":"page"},{"location":"models/COPODDetector_OutlierDetectionPython/","page":"COPODDetector","title":"COPODDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.copod","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_BetaML/#MultitargetNeuralNetworkRegressor_BetaML","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_BetaML/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"mutable struct MultitargetNeuralNetworkRegressor <: MLJModelInterface.Deterministic","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_BetaML/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"A simple but flexible Feedforward Neural Network, from the Beta Machine Learning Toolkit (BetaML) for regression of multiple dimensional targets.","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_BetaML/#Parameters:","page":"MultitargetNeuralNetworkRegressor","title":"Parameters:","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_BetaML/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"layers: Array of layer objects [def: nothing, i.e. basic network]. See subtypes(BetaML.AbstractLayer) for supported layers\nloss: Loss (cost) function [def: BetaML.squared_cost]. Should always assume y and ŷ as matrices.\nwarning: Warning\nIf you change the parameter loss, you need to either provide its derivative on the parameter dloss or use autodiff with dloss=nothing.\ndloss: Derivative of the loss function [def: BetaML.dsquared_cost, i.e. use the derivative of the squared cost]. Use nothing for autodiff.\nepochs: Number of epochs, i.e. passages trough the whole training sample [def: 300]\nbatch_size: Size of each individual batch [def: 16]\nopt_alg: The optimisation algorithm to update the gradient at each batch [def: BetaML.ADAM()]. 
See subtypes(BetaML.OptimisationAlgorithm) for supported optimizers\nshuffle: Whether to randomly shuffle the data at each iteration (epoch) [def: true]\ndescr: An optional title and/or description for this model\ncb: A call back function to provide information during training [def: BetaML.fitting_info]\nrng: Random Number Generator (see FIXEDSEED) [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_BetaML/#Notes:","page":"MultitargetNeuralNetworkRegressor","title":"Notes:","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_BetaML/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"data must be numerical\nthe label should be a n-records by n-dimensions matrix","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_BetaML/#Example:","page":"MultitargetNeuralNetworkRegressor","title":"Example:","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_BetaML/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"julia> using MLJ\n\njulia> X, y = @load_boston;\n\njulia> ydouble = hcat(y, y .*2 .+5);\n\njulia> modelType = @load MultitargetNeuralNetworkRegressor pkg = \"BetaML\" verbosity=0\nBetaML.Nn.MultitargetNeuralNetworkRegressor\n\njulia> layers = [BetaML.DenseLayer(12,50,f=BetaML.relu),BetaML.DenseLayer(50,50,f=BetaML.relu),BetaML.DenseLayer(50,50,f=BetaML.relu),BetaML.DenseLayer(50,2,f=BetaML.relu)];\n\njulia> model = modelType(layers=layers,opt_alg=BetaML.ADAM(),epochs=500)\nMultitargetNeuralNetworkRegressor(\n layers = BetaML.Nn.AbstractLayer[BetaML.Nn.DenseLayer([-0.2591582523441157 -0.027962845131416225 … 0.16044535560124418 -0.12838827994676857; -0.30381834909561184 0.2405495243851402 … -0.2588144861880588 0.09538577909777807; … ; -0.017320292924711156 -0.14042266424603767 … 0.06366999105841187 -0.13419651752478906; 0.07393079961409338 0.24521350531110264 … 0.04256867886217541 -0.0895506802948175], [0.14249427336553644, 0.24719379413682485, -0.25595911822556566, 0.10034088778965933, -0.017086404878505712, 0.21932184025609347, -0.031413516834861266, -0.12569076082247596, -0.18080140982481183, 0.14551901873323253 … -0.13321995621967364, 0.2436582233332092, 0.0552222336976439, 0.07000814133633904, 0.2280064379660025, -0.28885681475734193, -0.07414214246290696, -0.06783184733650621, -0.055318068046308455, -0.2573488383282579], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.0395424111703751 -0.22531232360829911 … -0.04341228943744482 0.024336206858365517; -0.16481887432946268 0.17798073384748508 … -0.18594039305095766 0.051159225856547474; … ; -0.011639475293705043 -0.02347011206244673 … 0.20508869536159186 -0.1158382446274592; -0.19078069527757857 -0.007487540070740484 … -0.21341165344291158 -0.24158671316310726], [-0.04283623889330032, 0.14924461547060602, -0.17039563392959683, 0.00907774027816255, 0.21738885963113852, -0.06308040225941691, -0.14683286822101105, 0.21726892197970937, 0.19784321784707126, -0.0344988665714947 … -0.23643089430602846, -0.013560425201427584, 0.05323948910726356, -0.04644175812567475, -0.2350400292671211, 0.09628312383424742, 0.07016420995205697, -0.23266392927140334, -0.18823664451487, 0.2304486691429084], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.11504184627266828 0.08601794194664503 … 0.03843129724045469 -0.18417305624127284; 0.10181551438831654 0.13459759904443674 … 0.11094951365942118 
-0.1549466590355218; … ; 0.15279817525427697 0.0846661196058916 … -0.07993619892911122 0.07145402617285884; -0.1614160186346092 -0.13032002335149 … -0.12310552194729624 -0.15915773071049827], [-0.03435885900946367, -0.1198543931290306, 0.008454985905194445, -0.17980887188986966, -0.03557204910359624, 0.19125847393334877, -0.10949700778538696, -0.09343206702591, -0.12229583511781811, -0.09123969069220564 … 0.22119233518322862, 0.2053873143308657, 0.12756489387198222, 0.11567243705173319, -0.20982445664020496, 0.1595157838386987, -0.02087331046544119, -0.20556423263489765, -0.1622837764237961, -0.019220998739847395], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.25796717031347993 0.17579536633402948 … -0.09992960168785256 -0.09426177454620635; -0.026436330246675632 0.18070899284865127 … -0.19310119102392206 -0.06904005900252091], [0.16133004882307822, -0.3061228721091248], BetaML.Utils.relu, BetaML.Utils.drelu)], \n loss = BetaML.Utils.squared_cost, \n dloss = BetaML.Utils.dsquared_cost, \n epochs = 500, \n batch_size = 32, \n opt_alg = BetaML.Nn.ADAM(BetaML.Nn.var\"#90#93\"(), 1.0, 0.9, 0.999, 1.0e-8, BetaML.Nn.Learnable[], BetaML.Nn.Learnable[]), \n shuffle = true, \n descr = \"\", \n cb = BetaML.Nn.fitting_info, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, ydouble);\n\njulia> fit!(mach);\n\njulia> ŷdouble = predict(mach, X);\n\njulia> hcat(ydouble,ŷdouble)\n506×4 Matrix{Float64}:\n 24.0 53.0 28.4624 62.8607\n 21.6 48.2 22.665 49.7401\n 34.7 74.4 31.5602 67.9433\n 33.4 71.8 33.0869 72.4337\n ⋮ \n 23.9 52.8 23.3573 50.654\n 22.0 49.0 22.1141 48.5926\n 11.9 28.8 19.9639 45.5823","category":"page"},{"location":"models/MultinomialNBClassifier_MLJScikitLearnInterface/#MultinomialNBClassifier_MLJScikitLearnInterface","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"","category":"section"},{"location":"models/MultinomialNBClassifier_MLJScikitLearnInterface/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"MultinomialNBClassifier","category":"page"},{"location":"models/MultinomialNBClassifier_MLJScikitLearnInterface/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"A model type for constructing a multinomial naive Bayes classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultinomialNBClassifier_MLJScikitLearnInterface/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultinomialNBClassifier_MLJScikitLearnInterface/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"MultinomialNBClassifier = @load MultinomialNBClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/MultinomialNBClassifier_MLJScikitLearnInterface/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"Do model = MultinomialNBClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultinomialNBClassifier(alpha=...).","category":"page"},{"location":"models/MultinomialNBClassifier_MLJScikitLearnInterface/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"Multinomial naive bayes classifier. It is suitable for classification with discrete features (e.g. 
word counts for text classification).","category":"page"},{"location":"models/LarsRegressor_MLJScikitLearnInterface/#LarsRegressor_MLJScikitLearnInterface","page":"LarsRegressor","title":"LarsRegressor","text":"","category":"section"},{"location":"models/LarsRegressor_MLJScikitLearnInterface/","page":"LarsRegressor","title":"LarsRegressor","text":"LarsRegressor","category":"page"},{"location":"models/LarsRegressor_MLJScikitLearnInterface/","page":"LarsRegressor","title":"LarsRegressor","text":"A model type for constructing a least angle regressor (LARS), based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LarsRegressor_MLJScikitLearnInterface/","page":"LarsRegressor","title":"LarsRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LarsRegressor_MLJScikitLearnInterface/","page":"LarsRegressor","title":"LarsRegressor","text":"LarsRegressor = @load LarsRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LarsRegressor_MLJScikitLearnInterface/","page":"LarsRegressor","title":"LarsRegressor","text":"Do model = LarsRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LarsRegressor(fit_intercept=...).","category":"page"},{"location":"models/LarsRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"LarsRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LarsRegressor_MLJScikitLearnInterface/","page":"LarsRegressor","title":"LarsRegressor","text":"fit_intercept = true\nverbose = false\nnormalize = false\nprecompute = auto\nn_nonzero_coefs = 500\neps = 2.220446049250313e-16\ncopy_X = true\nfit_path = true","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/#LOFDetector_OutlierDetectionNeighbors","page":"LOFDetector","title":"LOFDetector","text":"","category":"section"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"LOFDetector(k = 5,\n metric = Euclidean(),\n algorithm = :kdtree,\n leafsize = 10,\n reorder = true,\n parallel = false)","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"Calculate an anomaly score based on the density of an instance in comparison to its neighbors. This algorithm introduced the notion of local outliers and was developed by Breunig et al., see [1].","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/#Parameters","page":"LOFDetector","title":"Parameters","text":"","category":"section"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"k::Integer","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"Number of neighbors (must be greater than 0).","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"metric::Metric","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"This is one of the Metric types defined in the Distances.jl package. 
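{"location":"models/LarsRegressor_MLJScikitLearnInterface/","page":"LarsRegressor","title":"LarsRegressor","text":"As a minimal illustrative sketch (not taken from the official docstring), LarsRegressor might be used as follows, assuming MLJScikitLearnInterface and its scikit-learn dependency are installed; the data come from MLJ's make_regression and the value passed to n_nonzero_coefs is arbitrary:","category":"page"},{"location":"models/LarsRegressor_MLJScikitLearnInterface/","page":"LarsRegressor","title":"LarsRegressor","text":"using MLJ\nLarsRegressor = @load LarsRegressor pkg=MLJScikitLearnInterface\nX, y = make_regression(100, 5)  ## synthetic table of features and continuous target\nmodel = LarsRegressor(n_nonzero_coefs=3)  ## cap on the number of active coefficients\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X)","category":"page"}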
It is possible to define your own metrics by creating new types that are subtypes of Metric.","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"algorithm::Symbol","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"static::Union{Bool, Symbol}","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"leafsize::Int","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"Determines at what number of points to stop splitting the tree further. There is a trade-off between traversing the tree and having to evaluate the metric function for increasing number of points.","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"reorder::Bool","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"While building the tree this will put points close in distance close in memory since this helps with cache locality. In this case, a copy of the original data will be made so that the original data is left unmodified. This can have a significant impact on performance and is by default set to true.","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"parallel::Bool","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. 
Note: fit is not parallel.","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/#Examples","page":"LOFDetector","title":"Examples","text":"","category":"section"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"using OutlierDetection: LOFDetector, fit, transform\ndetector = LOFDetector()\nX = rand(10, 100)\nmodel, result = fit(detector, X; verbosity=0)\ntest_scores = transform(detector, model, X)","category":"page"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/#References","page":"LOFDetector","title":"References","text":"","category":"section"},{"location":"models/LOFDetector_OutlierDetectionNeighbors/","page":"LOFDetector","title":"LOFDetector","text":"[1] Breunig, Markus M.; Kriegel, Hans-Peter; Ng, Raymond T.; Sander, Jörg (2000): LOF: Identifying Density-Based Local Outliers.","category":"page"},{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/#AdaBoostClassifier_MLJScikitLearnInterface","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"","category":"section"},{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"AdaBoostClassifier","category":"page"},{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"A model type for constructing an AdaBoost classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"AdaBoostClassifier = @load AdaBoostClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"Do model = AdaBoostClassifier() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in AdaBoostClassifier(estimator=...).","category":"page"},{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"An AdaBoost classifier is a meta-estimator that begins by fitting a classifier on the original dataset and then fits additional copies of the classifier on the same dataset but where the weights of incorrectly classified instances are adjusted such that subsequent classifiers focus more on difficult cases.","category":"page"},{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"This class implements the algorithm known as AdaBoost-SAMME.","category":"page"},{"location":"models/SVMLinearClassifier_MLJScikitLearnInterface/#SVMLinearClassifier_MLJScikitLearnInterface","page":"SVMLinearClassifier","title":"SVMLinearClassifier","text":"","category":"section"},{"location":"models/SVMLinearClassifier_MLJScikitLearnInterface/","page":"SVMLinearClassifier","title":"SVMLinearClassifier","text":"SVMLinearClassifier","category":"page"},{"location":"models/SVMLinearClassifier_MLJScikitLearnInterface/","page":"SVMLinearClassifier","title":"SVMLinearClassifier","text":"A model type for constructing a linear support vector classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SVMLinearClassifier_MLJScikitLearnInterface/","page":"SVMLinearClassifier","title":"SVMLinearClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SVMLinearClassifier_MLJScikitLearnInterface/","page":"SVMLinearClassifier","title":"SVMLinearClassifier","text":"SVMLinearClassifier = @load SVMLinearClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/SVMLinearClassifier_MLJScikitLearnInterface/","page":"SVMLinearClassifier","title":"SVMLinearClassifier","text":"Do model = SVMLinearClassifier() to construct an instance with default hyper-parameters. 
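{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"As a minimal illustrative sketch (not taken from the official docstring), AdaBoostClassifier might be used as follows. This assumes scikit-learn is available through MLJScikitLearnInterface, that n_estimators is exposed unchanged from scikit-learn, and that the wrapper makes probabilistic predictions, as most MLJScikitLearnInterface classifiers do:","category":"page"},{"location":"models/AdaBoostClassifier_MLJScikitLearnInterface/","page":"AdaBoostClassifier","title":"AdaBoostClassifier","text":"using MLJ\nAdaBoostClassifier = @load AdaBoostClassifier pkg=MLJScikitLearnInterface\nX, y = @load_iris\nmodel = AdaBoostClassifier(n_estimators=50)\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X)  ## probabilistic predictions\npredict_mode(mach, X)  ## point predictions","category":"page"}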
Provide keyword arguments to override hyper-parameter defaults, as in SVMLinearClassifier(penalty=...).","category":"page"},{"location":"models/SVMLinearClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"SVMLinearClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SVMLinearClassifier_MLJScikitLearnInterface/","page":"SVMLinearClassifier","title":"SVMLinearClassifier","text":"penalty = l2\nloss = squared_hinge\ndual = true\ntol = 0.0001\nC = 1.0\nmulti_class = ovr\nfit_intercept = true\nintercept_scaling = 1.0\nrandom_state = nothing\nmax_iter = 1000","category":"page"},{"location":"models/StableForestClassifier_SIRUS/#StableForestClassifier_SIRUS","page":"StableForestClassifier","title":"StableForestClassifier","text":"","category":"section"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"StableForestClassifier","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"A model type for constructing a stable forest classifier, based on SIRUS.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"StableForestClassifier = @load StableForestClassifier pkg=SIRUS","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"Do model = StableForestClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in StableForestClassifier(rng=...).","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"StableForestClassifier implements the random forest classifier with a stabilized forest structure (Bénard et al., 2021). This stabilization increases stability when extracting rules. The impact on the predictive accuracy compared to standard random forests should be relatively small.","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"note: Note\nJust like normal random forests, this model is not easily explainable. 
If you are interested in an explainable model, use the StableRulesClassifier or StableRulesRegressor.","category":"page"},{"location":"models/StableForestClassifier_SIRUS/#Training-data","page":"StableForestClassifier","title":"Training data","text":"","category":"section"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"where","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/StableForestClassifier_SIRUS/#Hyperparameters","page":"StableForestClassifier","title":"Hyperparameters","text":"","category":"section"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"rng::AbstractRNG=default_rng(): Random number generator. Using a StableRNG from StableRNGs.jl is advised.\npartial_sampling::Float64=0.7: Ratio of samples to use in each subset of the data. The default should be fine for most cases.\nn_trees::Int=1000: The number of trees to use. It is advisable to use at least a thousand trees for better rule selection and, in turn, better predictive performance.\nmax_depth::Int=2: The depth of the tree. A lower depth decreases model complexity and can therefore improve accuracy when the sample size is small (reduce overfitting).\nq::Int=10: Number of cutpoints to use per feature. 
The default value should be fine for most situations.\nmin_data_in_leaf::Int=5: Minimum number of data points per leaf.","category":"page"},{"location":"models/StableForestClassifier_SIRUS/#Fitted-parameters","page":"StableForestClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"fitresult: A StableForest object.","category":"page"},{"location":"models/StableForestClassifier_SIRUS/#Operations","page":"StableForestClassifier","title":"Operations","text":"","category":"section"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"predict(mach, Xnew): Return a vector of predictions for each row of Xnew.","category":"page"},{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/#RidgeCVRegressor_MLJScikitLearnInterface","page":"RidgeCVRegressor","title":"RidgeCVRegressor","text":"","category":"section"},{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/","page":"RidgeCVRegressor","title":"RidgeCVRegressor","text":"RidgeCVRegressor","category":"page"},{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/","page":"RidgeCVRegressor","title":"RidgeCVRegressor","text":"A model type for constructing a ridge regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/","page":"RidgeCVRegressor","title":"RidgeCVRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/","page":"RidgeCVRegressor","title":"RidgeCVRegressor","text":"RidgeCVRegressor = @load RidgeCVRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/","page":"RidgeCVRegressor","title":"RidgeCVRegressor","text":"Do model = RidgeCVRegressor() to construct an instance with default hyper-parameters. 
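{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"As a minimal illustrative sketch (not taken from the official SIRUS.jl docstring), StableForestClassifier might be used as follows, with the built-in @load_iris data and an arbitrary, smaller-than-default n_trees to keep fitting quick; predict is assumed here to return probabilistic predictions, with predict_mode giving point predictions:","category":"page"},{"location":"models/StableForestClassifier_SIRUS/","page":"StableForestClassifier","title":"StableForestClassifier","text":"using MLJ\nStableForestClassifier = @load StableForestClassifier pkg=SIRUS\nX, y = @load_iris\nmodel = StableForestClassifier(n_trees=100)  ## default is 1000\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X)\npredict_mode(mach, X)","category":"page"}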
Provide keyword arguments to override hyper-parameter defaults, as in RidgeCVRegressor(alphas=...).","category":"page"},{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"RidgeCVRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/","page":"RidgeCVRegressor","title":"RidgeCVRegressor","text":"alphas = (0.1, 1.0, 10.0)\nfit_intercept = true\nscoring = nothing\ncv = 5\ngcv_mode = nothing\nstore_cv_values = false","category":"page"},{"location":"models/CountTransformer_MLJText/#CountTransformer_MLJText","page":"CountTransformer","title":"CountTransformer","text":"","category":"section"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"CountTransformer","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"A model type for constructing a count transformer, based on MLJText.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"CountTransformer = @load CountTransformer pkg=MLJText","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"Do model = CountTransformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in CountTransformer(max_doc_freq=...).","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"The transformer converts a collection of documents, tokenized or pre-parsed as bags of words/ngrams, to a matrix of term counts.","category":"page"},{"location":"models/CountTransformer_MLJText/#Training-data","page":"CountTransformer","title":"Training data","text":"","category":"section"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"mach = machine(model, X)","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"Here:","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"X is any vector whose elements are either tokenized documents or bags of words/ngrams. 
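{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/","page":"RidgeCVRegressor","title":"RidgeCVRegressor","text":"As a minimal illustrative sketch (not taken from the official docstring), RidgeCVRegressor might be used as follows, assuming scikit-learn is available through MLJScikitLearnInterface; the alphas grid shown is arbitrary and the data are fabricated with make_regression:","category":"page"},{"location":"models/RidgeCVRegressor_MLJScikitLearnInterface/","page":"RidgeCVRegressor","title":"RidgeCVRegressor","text":"using MLJ\nRidgeCVRegressor = @load RidgeCVRegressor pkg=MLJScikitLearnInterface\nX, y = make_regression(200, 3)\nmodel = RidgeCVRegressor(alphas=(0.01, 0.1, 1.0, 10.0))  ## candidate regularization strengths\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X)","category":"page"}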
Specifically, each element is one of the following:\nA vector of abstract strings (tokens), e.g., [\"I\", \"like\", \"Sam\", \".\", \"Sam\", \"is\", \"nice\", \".\"] (scitype AbstractVector{Textual})\nA dictionary of counts, indexed on abstract strings, e.g., Dict(\"I\"=>1, \"Sam\"=>2, \"Sam is\"=>1) (scitype Multiset{Textual})\nA dictionary of counts, indexed on plain ngrams, e.g., Dict((\"I\",)=>1, (\"Sam\",)=>2, (\"I\", \"Sam\")=>1) (scitype Multiset{<:NTuple{N,Textual} where N}); here a plain ngram is a tuple of abstract strings.","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/CountTransformer_MLJText/#Hyper-parameters","page":"CountTransformer","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"max_doc_freq=1.0: Restricts the vocabulary that the transformer will consider. Terms that occur in > max_doc_freq documents will not be considered by the transformer. For example, if max_doc_freq is set to 0.9, terms that are in more than 90% of the documents will be removed.\nmin_doc_freq=0.0: Restricts the vocabulary that the transformer will consider. Terms that occur in < min_doc_freq documents will not be considered by the transformer. A value of 0.01 means that only terms that appear in at least 1% of the documents will be included.","category":"page"},{"location":"models/CountTransformer_MLJText/#Operations","page":"CountTransformer","title":"Operations","text":"","category":"section"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"transform(mach, Xnew): Based on the vocabulary learned in training, return the matrix of counts for Xnew, a vector of the same form as X above. The matrix has size (n, p), where n = length(Xnew) and p is the size of the vocabulary. Tokens/ngrams not appearing in the learned vocabulary are scored zero.","category":"page"},{"location":"models/CountTransformer_MLJText/#Fitted-parameters","page":"CountTransformer","title":"Fitted parameters","text":"","category":"section"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"vocab: A vector containing the strings used in the transformer's vocabulary.","category":"page"},{"location":"models/CountTransformer_MLJText/#Examples","page":"CountTransformer","title":"Examples","text":"","category":"section"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"CountTransformer accepts a variety of inputs. 
The example below transforms tokenized documents:","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"using MLJ\nimport TextAnalysis\n\nCountTransformer = @load CountTransformer pkg=MLJText\n\ndocs = [\"Hi my name is Sam.\", \"How are you today?\"]\ncount_transformer = CountTransformer()\n\njulia> tokenized_docs = TextAnalysis.tokenize.(docs)\n2-element Vector{Vector{String}}:\n [\"Hi\", \"my\", \"name\", \"is\", \"Sam\", \".\"]\n [\"How\", \"are\", \"you\", \"today\", \"?\"]\n\nmach = machine(count_transformer, tokenized_docs)\nfit!(mach)\n\nfitted_params(mach)\n\ntfidf_mat = transform(mach, tokenized_docs)","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"Alternatively, one can provide documents pre-parsed as ngrams counts:","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"using MLJ\nimport TextAnalysis\n\ndocs = [\"Hi my name is Sam.\", \"How are you today?\"]\ncorpus = TextAnalysis.Corpus(TextAnalysis.NGramDocument.(docs, 1, 2))\nngram_docs = TextAnalysis.ngrams.(corpus)\n\njulia> ngram_docs[1]\nDict{AbstractString, Int64} with 11 entries:\n \"is\" => 1\n \"my\" => 1\n \"name\" => 1\n \".\" => 1\n \"Hi\" => 1\n \"Sam\" => 1\n \"my name\" => 1\n \"Hi my\" => 1\n \"name is\" => 1\n \"Sam .\" => 1\n \"is Sam\" => 1\n\ncount_transformer = CountTransformer()\nmach = machine(count_transformer, ngram_docs)\nMLJ.fit!(mach)\nfitted_params(mach)\n\ntfidf_mat = transform(mach, ngram_docs)","category":"page"},{"location":"models/CountTransformer_MLJText/","page":"CountTransformer","title":"CountTransformer","text":"See also TfidfTransformer, BM25Transformer","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/#RandomForestRegressor_DecisionTree","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"","category":"section"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"RandomForestRegressor","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"A model type for constructing a CART random forest regressor, based on DecisionTree.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"RandomForestRegressor = @load RandomForestRegressor pkg=DecisionTree","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"Do model = RandomForestRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomForestRegressor(max_depth=...).","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"DecisionTreeRegressor implements the standard Random Forest algorithm, originally published in Breiman, L. (2001): \"Random Forests.\", Machine Learning, vol. 45, pp. 
5–32","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/#Training-data","page":"RandomForestRegressor","title":"Training data","text":"","category":"section"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"where","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/#Hyperparameters","page":"RandomForestRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"max_depth=-1: max depth of the decision tree (-1=any)\nmin_samples_leaf=1: min number of samples each leaf needs to have\nmin_samples_split=2: min number of samples needed for a split\nmin_purity_increase=0: min purity needed for a split\nn_subfeatures=-1: number of features to select at random (0 for all, -1 for square root of number of features)\nn_trees=10: number of trees to train\nsampling_fraction=0.7 fraction of samples to train each tree on\nfeature_importance: method to use for computing feature importances. 
One of (:impurity, :split)\nrng=Random.GLOBAL_RNG: random number generator or seed","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/#Operations","page":"RandomForestRegressor","title":"Operations","text":"","category":"section"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"predict(mach, Xnew): return predictions of the target given new features Xnew having the same scitype as X above.","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/#Fitted-parameters","page":"RandomForestRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"forest: the Ensemble object returned by the core DecisionTree.jl algorithm","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/#Report","page":"RandomForestRegressor","title":"Report","text":"","category":"section"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"features: the names of the features encountered in training","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/#Accessor-functions","page":"RandomForestRegressor","title":"Accessor functions","text":"","category":"section"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/#Examples","page":"RandomForestRegressor","title":"Examples","text":"","category":"section"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"using MLJ\nForest = @load RandomForestRegressor pkg=DecisionTree\nforest = Forest(max_depth=4, min_samples_split=3)\n\nX, y = make_regression(100, 2) ## synthetic data\nmach = machine(forest, X, y) |> fit!\n\nXnew, _ = make_regression(3, 2)\nyhat = predict(mach, Xnew) ## new predictions\n\nfitted_params(mach).forest ## raw `Ensemble` object from DecisionTree.jl\nfeature_importances(mach)","category":"page"},{"location":"models/RandomForestRegressor_DecisionTree/","page":"RandomForestRegressor","title":"RandomForestRegressor","text":"See also DecisionTree.jl and the unwrapped model type MLJDecisionTreeInterface.DecisionTree.RandomForestRegressor.","category":"page"},{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/#MultiTaskElasticNetRegressor_MLJScikitLearnInterface","page":"MultiTaskElasticNetRegressor","title":"MultiTaskElasticNetRegressor","text":"","category":"section"},{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetRegressor","title":"MultiTaskElasticNetRegressor","text":"MultiTaskElasticNetRegressor","category":"page"},{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetRegressor","title":"MultiTaskElasticNetRegressor","text":"A model type for constructing a multi-target elastic net 
regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetRegressor","title":"MultiTaskElasticNetRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetRegressor","title":"MultiTaskElasticNetRegressor","text":"MultiTaskElasticNetRegressor = @load MultiTaskElasticNetRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetRegressor","title":"MultiTaskElasticNetRegressor","text":"Do model = MultiTaskElasticNetRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultiTaskElasticNetRegressor(alpha=...).","category":"page"},{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"MultiTaskElasticNetRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetRegressor","title":"MultiTaskElasticNetRegressor","text":"alpha = 1.0\nl1_ratio = 0.5\nfit_intercept = true\ncopy_X = true\nmax_iter = 1000\ntol = 0.0001\nwarm_start = false\nrandom_state = nothing\nselection = cyclic","category":"page"},{"location":"models/XGBoostCount_XGBoost/#XGBoostCount_XGBoost","page":"XGBoostCount","title":"XGBoostCount","text":"","category":"section"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"XGBoostCount","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"A model type for constructing a eXtreme Gradient Boosting Count Regressor, based on XGBoost.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"XGBoostCount = @load XGBoostCount pkg=XGBoost","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"Do model = XGBoostCount() to construct an instance with default hyper-parameters. 
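{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetRegressor","title":"MultiTaskElasticNetRegressor","text":"As a minimal illustrative sketch (not taken from the official docstring), MultiTaskElasticNetRegressor might be used as follows. It assumes the model accepts a multi-column target supplied as a table, as is usual for the multi-target regressors in MLJScikitLearnInterface, and fabricates the data with make_regression:","category":"page"},{"location":"models/MultiTaskElasticNetRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetRegressor","title":"MultiTaskElasticNetRegressor","text":"using MLJ\nMultiTaskElasticNetRegressor = @load MultiTaskElasticNetRegressor pkg=MLJScikitLearnInterface\nX, y = make_regression(100, 4)\nY = (y1 = y, y2 = 2 .* y .+ 1)  ## two-column target table\nmodel = MultiTaskElasticNetRegressor(alpha=0.1, l1_ratio=0.5)\nmach = machine(model, X, Y) |> fit!\npredict(mach, X)","category":"page"}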
Provide keyword arguments to override hyper-parameter defaults, as in XGBoostCount(test=...).","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"Univariate discrete regression using xgboost.","category":"page"},{"location":"models/XGBoostCount_XGBoost/#Training-data","page":"XGBoostCount","title":"Training data","text":"","category":"section"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"m = machine(model, X, y)","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"where","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"X: any table of input features, either an AbstractMatrix or Tables.jl-compatible table.\ny: is an AbstractVector continuous target.","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"Train using fit!(m, rows=...).","category":"page"},{"location":"models/XGBoostCount_XGBoost/#Hyper-parameters","page":"XGBoostCount","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"See https://xgboost.readthedocs.io/en/stable/parameter.html.","category":"page"},{"location":"models/HistGradientBoostingRegressor_MLJScikitLearnInterface/#HistGradientBoostingRegressor_MLJScikitLearnInterface","page":"HistGradientBoostingRegressor","title":"HistGradientBoostingRegressor","text":"","category":"section"},{"location":"models/HistGradientBoostingRegressor_MLJScikitLearnInterface/","page":"HistGradientBoostingRegressor","title":"HistGradientBoostingRegressor","text":"HistGradientBoostingRegressor","category":"page"},{"location":"models/HistGradientBoostingRegressor_MLJScikitLearnInterface/","page":"HistGradientBoostingRegressor","title":"HistGradientBoostingRegressor","text":"A model type for constructing a gradient boosting ensemble regression, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/HistGradientBoostingRegressor_MLJScikitLearnInterface/","page":"HistGradientBoostingRegressor","title":"HistGradientBoostingRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/HistGradientBoostingRegressor_MLJScikitLearnInterface/","page":"HistGradientBoostingRegressor","title":"HistGradientBoostingRegressor","text":"HistGradientBoostingRegressor = @load HistGradientBoostingRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/HistGradientBoostingRegressor_MLJScikitLearnInterface/","page":"HistGradientBoostingRegressor","title":"HistGradientBoostingRegressor","text":"Do model = HistGradientBoostingRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in HistGradientBoostingRegressor(loss=...).","category":"page"},{"location":"models/HistGradientBoostingRegressor_MLJScikitLearnInterface/","page":"HistGradientBoostingRegressor","title":"HistGradientBoostingRegressor","text":"This estimator builds an additive model in a forward stage-wise fashion; it allows for the optimization of arbitrary differentiable loss functions. 
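{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"As a minimal illustrative sketch (not taken from the official docstring), XGBoostCount might be used as follows, with a fabricated table of Continuous features and an integer-valued (Count) target; all hyper-parameters are left at their defaults:","category":"page"},{"location":"models/XGBoostCount_XGBoost/","page":"XGBoostCount","title":"XGBoostCount","text":"using MLJ\nXGBoostCount = @load XGBoostCount pkg=XGBoost\nX = (x1 = rand(100), x2 = rand(100))  ## table of Continuous features\ny = rand(0:10, 100)  ## integer-valued target with scitype Count\nmach = machine(XGBoostCount(), X, y) |> fit!\npredict(mach, X)","category":"page"}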
In each stage a regression tree is fit on the negative gradient of the given loss function.","category":"page"},{"location":"models/HistGradientBoostingRegressor_MLJScikitLearnInterface/","page":"HistGradientBoostingRegressor","title":"HistGradientBoostingRegressor","text":"HistGradientBoostingRegressor is a much faster variant of this algorithm for intermediate datasets (n_samples >= 10_000).","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#EvoTreeMLE_EvoTrees","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"EvoTreeMLE(;kwargs...)","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"A model type for constructing an EvoTreeMLE, based on EvoTrees.jl, and implementing both an internal API and the MLJ model interface. EvoTreeMLE performs maximum likelihood estimation. The assumed distribution is specified through the loss kwarg. Both Gaussian and Logistic distributions are supported.","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#Hyper-parameters","page":"EvoTreeMLE","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"loss=:gaussian: Loss to be minimized during training. One of:","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":":gaussian / :gaussian_mle\n:logistic / :logistic_mle\nnrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.\neta=0.1: Learning rate. Each tree's raw predictions are scaled by eta prior to being added to the stack of predictions. Must be > 0.","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"A lower eta results in slower learning, requiring a higher nrounds, but typically improves model performance. ","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"L2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.\nlambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. Higher lambda can result in a more robust model.\ngamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model. Must be >= 0.\nmax_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.\nmin_weight=8.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.\nrowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be in ]0, 1].\ncolsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be in ]0, 1].\nnbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. 
Should be between 2 and 255.\nmonotone_constraints=Dict{Int, Int}(): Specify monotonic constraints using a dict where the key is the feature index and the value is the applicable constraint (-1=decreasing, 0=none, 1=increasing). !Experimental feature: note that for MLE regression, constraints may not be enforced systematically.\ntree_type=\"binary\": Tree structure to be used. One of:\nbinary: Each node of a tree is grown independently. Trees are built depthwise until the maximum depth is reached or until min weight or gain (see gamma) stops further node splits.\noblivious: A common splitting condition is imposed on all nodes of a given depth.\nrng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#Internal-API","page":"EvoTreeMLE","title":"Internal API","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"Do config = EvoTreeMLE() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeMLE(max_depth=...).","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#Training-model","page":"EvoTreeMLE","title":"Training model","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"A model is built using fit_evotree:","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"model = fit_evotree(config; x_train, y_train, kwargs...)","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#Inference","page":"EvoTreeMLE","title":"Inference","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"Predictions are obtained using predict, which returns a Matrix of size [nobs, nparams] where the second dimension refers to μ & σ for Normal/Gaussian and μ & s for Logistic.","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"EvoTrees.predict(model, X)","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"Alternatively, models act as functors, returning predictions when called as a function with features as the argument:","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"model(X)","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#MLJ","page":"EvoTreeMLE","title":"MLJ","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"From MLJ, the type can be imported using:","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"EvoTreeMLE = @load EvoTreeMLE pkg=EvoTrees","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"Do model = EvoTreeMLE() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeMLE(loss=...).","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#Training-data","page":"EvoTreeMLE","title":"Training data","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"where","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#Operations","page":"EvoTreeMLE","title":"Operations","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"predict(mach, Xnew): returns a vector of Gaussian or Logistic distributions (according to provided loss) given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"Predictions are probabilistic.","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"Specific metrics can also be predicted using:","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"predict_mean(mach, Xnew)\npredict_mode(mach, Xnew)\npredict_median(mach, Xnew)","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#Fitted-parameters","page":"EvoTreeMLE","title":"Fitted parameters","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":":fitresult: The GBTree object returned by EvoTrees.jl fitting algorithm.","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#Report","page":"EvoTreeMLE","title":"Report","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"The fields of report(mach) are:","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":":features: The names of the features encountered in training.","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/#Examples","page":"EvoTreeMLE","title":"Examples","text":"","category":"section"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"## Internal API\nusing EvoTrees\nconfig = EvoTreeMLE(max_depth=5, nbins=32, nrounds=100)\nnobs, nfeats = 1_000, 5\nx_train, y_train = randn(nobs, nfeats), rand(nobs)\nmodel = fit_evotree(config; x_train, y_train)\npreds = EvoTrees.predict(model, 
x_train)","category":"page"},{"location":"models/EvoTreeMLE_EvoTrees/","page":"EvoTreeMLE","title":"EvoTreeMLE","text":"## MLJ Interface\nusing MLJ\nEvoTreeMLE = @load EvoTreeMLE pkg=EvoTrees\nmodel = EvoTreeMLE(max_depth=5, nbins=32, nrounds=100)\nX, y = @load_boston\nmach = machine(model, X, y) |> fit!\npreds = predict(mach, X)\npreds = predict_mean(mach, X)\npreds = predict_mode(mach, X)\npreds = predict_median(mach, X)","category":"page"},{"location":"homogeneous_ensembles/#Homogeneous-Ensembles","page":"Homogeneous Ensembles","title":"Homogeneous Ensembles","text":"","category":"section"},{"location":"homogeneous_ensembles/","page":"Homogeneous Ensembles","title":"Homogeneous Ensembles","text":"Although an ensemble of models sharing a common set of hyperparameters can be defined using the learning network API, MLJ's EnsembleModel model wrapper is preferred, for convenience and best performance. Examples of using EnsembleModel are given in this Data Science Tutorial.","category":"page"},{"location":"homogeneous_ensembles/","page":"Homogeneous Ensembles","title":"Homogeneous Ensembles","text":"When bagging decision trees, further randomness is normally introduced by subsampling features, when training each node of each tree (Ho (1995), Brieman and Cutler (2001)). A bagged ensemble of such trees is known as a Random Forest. You can see an example of using EnsembleModel to build a random forest in this Data Science Tutorial. However, you may also want to use a canned random forest model. Run models(\"RandomForest\") to list such models.","category":"page"},{"location":"homogeneous_ensembles/","page":"Homogeneous Ensembles","title":"Homogeneous Ensembles","text":"MLJEnsembles.EnsembleModel","category":"page"},{"location":"homogeneous_ensembles/#MLJEnsembles.EnsembleModel","page":"Homogeneous Ensembles","title":"MLJEnsembles.EnsembleModel","text":"EnsembleModel(model,\n atomic_weights=Float64[],\n bagging_fraction=0.8,\n n=100,\n rng=GLOBAL_RNG,\n acceleration=CPU1(),\n out_of_bag_measure=[])\n\nCreate a model for training an ensemble of n clones of model, with optional bagging. Ensembling is useful if fit!(machine(atom, data...)) does not create identical models on repeated calls (ie, is a stochastic model, such as a decision tree with randomized node selection criteria), or if bagging_fraction is set to a value less than 1.0, or both.\n\nHere the atomic model must support targets with scitype AbstractVector{<:Finite} (single-target classifiers) or AbstractVector{<:Continuous} (single-target regressors).\n\nIf rng is an integer, then MersenneTwister(rng) is the random number generator used for bagging. Otherwise some AbstractRNG object is expected.\n\nThe atomic predictions are optionally weighted according to the vector atomic_weights (to allow for external optimization) except in the case that model is a Deterministic classifier, in which case atomic_weights are ignored.\n\nThe ensemble model is Deterministic or Probabilistic, according to the corresponding supertype of atom. In the case of deterministic classifiers (target_scitype(atom) <: Abstract{<:Finite}), the predictions are majority votes, and for regressors (target_scitype(atom)<: AbstractVector{<:Continuous}) they are ordinary averages. 
Probabilistic predictions are obtained by averaging the atomic probability distribution/mass functions; in particular, for regressors, the ensemble prediction on each input pattern has the type MixtureModel{VF,VS,D} from the Distributions.jl package, where D is the type of predicted distribution for atom.\n\nSpecify acceleration=CPUProcesses() for distributed computing, or CPUThreads() for multithreading.\n\nIf a single measure or non-empty vector of measures is specified by out_of_bag_measure, then out-of-bag estimates of performance are written to the training report (call report on the trained machine wrapping the ensemble model).\n\nImportant: If per-observation or class weights w (not to be confused with atomic weights) are specified when constructing a machine for the ensemble model, as in mach = machine(ensemble_model, X, y, w), then w is used by any measures specified in out_of_bag_measure that support them.\n\n\n\n\n\n","category":"function"},{"location":"models/PLSRegressor_PartialLeastSquaresRegressor/#PLSRegressor_PartialLeastSquaresRegressor","page":"PLSRegressor","title":"PLSRegressor","text":"","category":"section"},{"location":"models/PLSRegressor_PartialLeastSquaresRegressor/","page":"PLSRegressor","title":"PLSRegressor","text":"A Partial Least Squares Regressor. Contains PLS1, PLS2 (multi target) algorithms. Can be used mainly for regression.","category":"page"},{"location":"openml_integration/#OpenML-Integration","page":"OpenML Integration","title":"OpenML Integration","text":"","category":"section"},{"location":"openml_integration/","page":"OpenML Integration","title":"OpenML Integration","text":"The OpenML platform provides an integration platform for carrying out and comparing machine learning solutions across a broad collection of public datasets and software platforms.","category":"page"},{"location":"openml_integration/","page":"OpenML Integration","title":"OpenML Integration","text":"Integration with OpenML API is presently limited to querying and downloading datasets.","category":"page"},{"location":"openml_integration/","page":"OpenML Integration","title":"OpenML Integration","text":"Documentation is here.","category":"page"},{"location":"models/ECODDetector_OutlierDetectionPython/#ECODDetector_OutlierDetectionPython","page":"ECODDetector","title":"ECODDetector","text":"","category":"section"},{"location":"models/ECODDetector_OutlierDetectionPython/","page":"ECODDetector","title":"ECODDetector","text":"ECODDetector(n_jobs = 1)","category":"page"},{"location":"models/ECODDetector_OutlierDetectionPython/","page":"ECODDetector","title":"ECODDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.ecod","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/#UnivariateBoxCoxTransformer_MLJModels","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"","category":"section"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"UnivariateBoxCoxTransformer","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"A model type for constructing a single variable Box-Cox transformer, based on MLJModels.jl, and implementing the MLJ model 
interface.","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"UnivariateBoxCoxTransformer = @load UnivariateBoxCoxTransformer pkg=MLJModels","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"Do model = UnivariateBoxCoxTransformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateBoxCoxTransformer(n=...).","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"Box-Cox transformations attempt to make data look more normally distributed. This can improve performance and assist in the interpretation of models which suppose that data is generated by a normal distribution.","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"A Box-Cox transformation (with shift) is of the form","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"x -> ((x + c)^λ - 1)/λ","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"for some constant c and real λ, unless λ = 0, in which case the above is replaced with","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"x -> log(x + c)","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"Given user-specified hyper-parameters n::Integer and shift::Bool, the present implementation learns the parameters c and λ from the training data as follows: If shift=true and zeros are encountered in the data, then c is set to 0.2 times the data mean. If there are no zeros, then no shift is applied. 
Finally, n different values of λ between -0.4 and 3 are considered, with λ fixed to the value maximizing normality of the transformed data.","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"Reference: Wikipedia entry for power transform.","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/#Training-data","page":"UnivariateBoxCoxTransformer","title":"Training data","text":"","category":"section"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"mach = machine(model, x)","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"where","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"x: any abstract vector with element scitype Continuous; check the scitype with scitype(x)","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/#Hyper-parameters","page":"UnivariateBoxCoxTransformer","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"n=171: number of values of the exponent λ to try\nshift=false: whether to include a preliminary constant translation in transformations, in the presence of zeros","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/#Operations","page":"UnivariateBoxCoxTransformer","title":"Operations","text":"","category":"section"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"transform(mach, xnew): apply the Box-Cox transformation learned when fitting mach\ninverse_transform(mach, z): reconstruct the vector x whose transformation, as learned by mach, is z","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/#Fitted-parameters","page":"UnivariateBoxCoxTransformer","title":"Fitted parameters","text":"","category":"section"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"λ: the learned Box-Cox exponent\nc: the learned shift","category":"page"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/#Examples","page":"UnivariateBoxCoxTransformer","title":"Examples","text":"","category":"section"},{"location":"models/UnivariateBoxCoxTransformer_MLJModels/","page":"UnivariateBoxCoxTransformer","title":"UnivariateBoxCoxTransformer","text":"using MLJ\nusing UnicodePlots\nusing Random\nRandom.seed!(123)\n\ntransf = 
UnivariateBoxCoxTransformer()\n\nx = randn(1000).^2\n\nmach = machine(transf, x)\nfit!(mach)\n\nz = transform(mach, x)\n\njulia> histogram(x)\n ┌ ┐\n [ 0.0, 2.0) ┤███████████████████████████████████ 848\n [ 2.0, 4.0) ┤████▌ 109\n [ 4.0, 6.0) ┤█▍ 33\n [ 6.0, 8.0) ┤▍ 7\n [ 8.0, 10.0) ┤▏ 2\n [10.0, 12.0) ┤ 0\n [12.0, 14.0) ┤▏ 1\n └ ┘\n Frequency\n\njulia> histogram(z)\n ┌ ┐\n [-5.0, -4.0) ┤█▎ 8\n [-4.0, -3.0) ┤████████▊ 64\n [-3.0, -2.0) ┤█████████████████████▊ 159\n [-2.0, -1.0) ┤█████████████████████████████▊ 216\n [-1.0, 0.0) ┤███████████████████████████████████ 254\n [ 0.0, 1.0) ┤█████████████████████████▊ 188\n [ 1.0, 2.0) ┤████████████▍ 90\n [ 2.0, 3.0) ┤██▊ 20\n [ 3.0, 4.0) ┤▎ 1\n └ ┘\n Frequency\n","category":"page"},{"location":"performance_measures/#Performance-Measures","page":"Performance Measures","title":"Performance Measures","text":"","category":"section"},{"location":"performance_measures/#Quick-links","page":"Performance Measures","title":"Quick links","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"List of aliases of all measures\nMigration guide for changes to measures in MLJBase 1.0","category":"page"},{"location":"performance_measures/#Introduction","page":"Performance Measures","title":"Introduction","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"In MLJ loss functions, scoring rules, confusion matrices, sensitivities, etc, are collectively referred to as measures. These measures are provided by the package StatisticalMeasures.jl but are immediately available to the MLJ user. Here's a simple example of direct application of the log_loss measures to compute a training loss:","category":"page"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"using MLJ\nX, y = @load_iris\nDecisionTreeClassifier = @load DecisionTreeClassifier pkg=DecisionTree\ntree = DecisionTreeClassifier(max_depth=2)\nmach = machine(tree, X, y) |> fit!\nyhat = predict(mach, X)\nlog_loss(yhat, y)","category":"page"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"For more examples of direct measure usage, see the StatisticalMeasures.jl tutorial.","category":"page"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"A list of all measures, ready to use after running using MLJ or using StatisticalMeasures, is here. Alternatively, call measures() (experimental) to generate a dictionary keyed on available measure constructors, with measure metadata as values.","category":"page"},{"location":"performance_measures/#Custom-measures","page":"Performance Measures","title":"Custom measures","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"Any measure-like object with appropriate calling behavior can be used with MLJ. To quickly build custom measures, we recommend using the package StatisticalMeasuresBase.jl, which provides this tutorial. 
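For example, the calling behavior expected of a simple deterministic measure is just (ŷ, y) -> single aggregated value, as in this hypothetical sketch (the name mean_abs is not part of any package):\n\nusing Statistics\nmean_abs(ŷ, y) = mean(abs.(ŷ .- y))  ## measure-like callable: consumes predictions and ground truth, returns one number\n\n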
Note, in particular, that an \"atomic\" measure can be transformed into a multi-target measure using this package.","category":"page"},{"location":"performance_measures/#Uses-of-measures","page":"Performance Measures","title":"Uses of measures","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"In MLJ, measures are specified:","category":"page"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"when evaluating model performance using evaluate!/evaluate; see Evaluating Model Performance\nwhen wrapping models using TunedModel - see Tuning Models\nwhen wrapping iterative models using IteratedModel - see Controlling Iterative Models\nwhen generating learning curves using learning_curve - see Learning Curves","category":"page"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"and elsewhere.","category":"page"},{"location":"performance_measures/#Using-LossFunctions.jl","page":"Performance Measures","title":"Using LossFunctions.jl","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"In previous versions of MLJ, measures from LossFunctions.jl were also available. Now measures from that package must be explicitly imported and wrapped, as described here.","category":"page"},{"location":"performance_measures/#Receiver-operator-characteristics","page":"Performance Measures","title":"Receiver operator characteristics","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"A related performance evaluation tool provided by StatisticalMeasures.jl, and hence by MLJ, is the roc_curve method:","category":"page"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"StatisticalMeasures.roc_curve","category":"page"},{"location":"performance_measures/#StatisticalMeasures.roc_curve","page":"Performance Measures","title":"StatisticalMeasures.roc_curve","text":"roc_curve(ŷ, y) -> false_positive_rates, true_positive_rates, thresholds\n\nReturn data for plotting the receiver operator characteristic (ROC curve) for a binary classification problem.\n\nHere ŷ is a vector of UnivariateFinite distributions (from CategoricalDistributions.jl) over the two values taken by the ground truth observations y, a CategoricalVector. \n\nIf there are k unique probabilities, then there are correspondingly k thresholds and k+1 \"bins\" over which the false positive and true positive rates are constant:\n\n[0.0 - thresholds[1]]\n[thresholds[1] - thresholds[2]]\n...\n[thresholds[k] - 1]\n\nConsequently, true_positive_rates and false_positive_rates have length k+1 if thresholds has length k.\n\nTo plot the curve using your favorite plotting backend, do something like plot(false_positive_rates, true_positive_rates).\n\nCore algorithm: Functions.roc_curve\n\nSee also AreaUnderCurve. 
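\n\nA short usage sketch (assuming mach is a machine wrapping a probabilistic binary classifier, already trained on data X, y, and that a plotting package such as Plots.jl is loaded):\n\nŷ = predict(mach, X)              ## vector of UnivariateFinite distributions\nfprs, tprs, ts = roc_curve(ŷ, y)\nplot(fprs, tprs)                  ## the ROC curve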
\n\n\n\n\n\n","category":"function"},{"location":"performance_measures/#Migration-guide-for-changes-to-measures-in-MLJBase-1.0","page":"Performance Measures","title":"Migration guide for changes to measures in MLJBase 1.0","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"Prior to MLJBase.jl 1.0 (respectively, MLJ.jl version 0.19.6) measures were defined in MLJBase.jl (a dependency of MLJ.jl) but now they are provided by the MLJ.jl dependency StatisticalMeasures.jl. Effects on users are detailed below:","category":"page"},{"location":"performance_measures/#Breaking-behavior-likely-relevant-to-many-users","page":"Performance Measures","title":"Breaking behavior likely relevant to many users","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"If using MLJBase without MLJ, then, in Julia 1.9 or higher, StatisticalMeasures must be explicitly imported to use measures that were previously part of MLJBase. If using MLJ, then all previous measures are still available, with the exception of those corresponding to LossFunctions.jl (see below).\nAll measures return a single aggregated measurement. In other words, measures previously reporting a measurement per-observation (previously subtyping Unaggregated) no longer do so. To get per-observation measurements, use the new method StatisticalMeasures.measurements(measure, ŷ, y[, weights, class_weights]).\nThe default measure for regression models (used in evaluate/evaluate! when measures is unspecified) is changed from rms to l2=LPLoss(2) (mean sum of squares).\nMeanAbsoluteError has been removed and instead mae is an alias for LPLoss(p=1).\nMeasures that previously skipped NaN values will now (at least by default) propagate those values. Missing value behavior is unchanged, except some measures that previously did not support missing now do.\nAliases for measure types have been removed. For example, RMSE (alias for RootMeanSquaredError) is gone. Aliases for instances, such as rms and cross_entropy, persist. The exception is precision, for which ppv can be used in its place. (This is to avoid conflict with Base.precision, which was previously pirated.)\ninfo(measure) has been decommissioned; query docstrings or access the new measure traits individually instead. These traits are now provided by StatisticalMeasures.jl and are not exported. For example, to access the orientation of the measure rms, do import StatisticalMeasures as SM; SM.orientation(rms).\nBehavior of the measures() method, to list all measures and associated traits, has changed. It now returns a dictionary instead of a vector of named tuples; measures(predicate) is decommissioned, but measures(needle) is preserved. (This method, owned by StatisticalMeasures.jl, has some other search options, but is experimental.)\nMeasures that were wrappers of losses from LossFunctions.jl are no longer exposed by MLJBase or MLJ. To use such a loss, you must explicitly import LossFunctions and wrap the loss appropriately. See Using losses from LossFunctions.jl for examples.\nSome user-defined measures working in previous versions of MLJBase.jl may not work without modification, as they must conform to the new StatisticalMeasuresBase.jl API. See this tutorial on how to define new measures.\nMeasures with a \"feature argument\" X, as in some_measure(ŷ, y, X), are no longer supported. See What is a measure? 
for allowed signatures in measures.","category":"page"},{"location":"performance_measures/#Packages-implementing-the-MLJ-model-interface","page":"Performance Measures","title":"Packages implementing the MLJ model interface","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"The migration of measures is not expected to require any changes to the source code in packages providing implementations of the MLJ model interface (MLJModelInterface.jl) such as MLJDecisionTreeInterface.jl and MLJFlux.jl, and this is confirmed by extensive integration tests. However, some current tests will fail if they use MLJBase measures. The following should generally suffice to adapt such tests:","category":"page"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"Add StatisticalMeasures as a test dependency, and add using StatisticalMeasures to your runtests.jl (and/or included submodules).\nIf measures are qualified, as in MLJBase.rms, then the qualification must be removed or changed to StatisticalMeasures.rms, etc.\nBe aware that the default measure used in methods such as evaluate!, when measure is not specified, is changed from rms to l2 for regression models.\nBe aware that all measures now return a single aggregated measurement, and never a measurement for every observation. See the second point above.","category":"page"},{"location":"performance_measures/#Breaking-behavior-possibly-relevant-to-some-developers","page":"Performance Measures","title":"Breaking behavior possibly relevant to some developers","text":"","category":"section"},{"location":"performance_measures/","page":"Performance Measures","title":"Performance Measures","text":"The abstract measure types Aggregated, Unaggregated, Measure have been decommissioned. (A measure is now defined purely by its calling behavior.)\nWhat were previously exported as measure types are now only constructors.\ntarget_scitype(measure) is decommissioned. Related is StatisticalMeasures.observation_scitype(measure) which declares an upper bound on the allowed scitype of a single observation.\nprediction_type(measure) is decommissioned. Instead use StatisticalMeasures.kind_of_proxy(measure).\nThe trait reports_each_observation is decommissioned. Related is StatisticalMeasures.can_report_unaggregated; if false the new measurements method simply returns n copies of the aggregated measurement, where n is the number of observations provided, instead of individual observation-dependent measurements.\naggregation(measure) has been decommissioned. Instead use StatisticalMeasures.external_mode_of_aggregation(measure).\ninstances(measure) has been decommissioned; query docstrings for measure aliases, or follow this example: aliases = measures()[RootMeanSquaredError].aliases.\nis_feature_dependent(measure) has been decommissioned. Measures consuming feature data are no longer supported; see above.\ndistribution_type(measure) has been decommissioned.\ndocstring(measure) has been decommissioned.\nBehavior of aggregate has changed.\nThe following traits, previously exported by MLJBase and MLJ, cannot be applied to measures: supports_weights, supports_class_weights, orientation, human_name. 
Instead use the traits with these names provided by StatisticalMeasures.jl (they will need to be qualified, as in import StatisticalMeasures; StatisticalMeasures.orientation(measure)).","category":"page"},{"location":"models/GMMDetector_OutlierDetectionPython/#GMMDetector_OutlierDetectionPython","page":"GMMDetector","title":"GMMDetector","text":"","category":"section"},{"location":"models/GMMDetector_OutlierDetectionPython/","page":"GMMDetector","title":"GMMDetector","text":"GMMDetector(n_components=1,\n covariance_type=\"full\",\n tol=0.001,\n reg_covar=1e-06,\n max_iter=100,\n n_init=1,\n init_params=\"kmeans\",\n weights_init=None,\n means_init=None,\n precisions_init=None,\n random_state=None,\n warm_start=False)","category":"page"},{"location":"models/GMMDetector_OutlierDetectionPython/","page":"GMMDetector","title":"GMMDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.gmm","category":"page"},{"location":"models/LGBMRegressor_LightGBM/#LGBMRegressor_LightGBM","page":"LGBMRegressor","title":"LGBMRegressor","text":"","category":"section"},{"location":"models/LGBMRegressor_LightGBM/","page":"LGBMRegressor","title":"LGBMRegressor","text":"Microsoft LightGBM FFI wrapper: Regressor","category":"page"},{"location":"models/LMDDDetector_OutlierDetectionPython/#LMDDDetector_OutlierDetectionPython","page":"LMDDDetector","title":"LMDDDetector","text":"","category":"section"},{"location":"models/LMDDDetector_OutlierDetectionPython/","page":"LMDDDetector","title":"LMDDDetector","text":"LMDDDetector(n_iter = 50,\n dis_measure = \"aad\",\n random_state = nothing)","category":"page"},{"location":"models/LMDDDetector_OutlierDetectionPython/","page":"LMDDDetector","title":"LMDDDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.lmdd","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#EvoTreeClassifier_EvoTrees","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"EvoTreeClassifier(;kwargs...)","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"A model type for constructing an EvoTreeClassifier, based on EvoTrees.jl, and implementing both an internal API and the MLJ model interface. EvoTreeClassifier is used to perform multi-class classification, using cross-entropy loss.","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#Hyper-parameters","page":"EvoTreeClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"nrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.\neta=0.1: Learning rate. Each tree's raw predictions are scaled by eta prior to being added to the stack of predictions. Must be > 0. A lower eta results in slower learning, requiring a higher nrounds but typically improves model performance.\nL2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.\nlambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. Higher lambda can result in a more robust model.\ngamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model. 
Must be >= 0.\nmax_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.\nmin_weight=1.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.\nrowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be in ]0, 1].\ncolsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be in ]0, 1].\nnbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. Should be between 2 and 255.\ntree_type=\"binary\": Tree structure to be used. One of:\nbinary: Each node of a tree is grown independently. Trees are built depthwise until max depth is reached or until min weight or gain (see gamma) stops further node splits.\noblivious: A common splitting condition is imposed on all nodes of a given depth.\nrng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#Internal-API","page":"EvoTreeClassifier","title":"Internal API","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"Do config = EvoTreeClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeClassifier(max_depth=...).","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#Training-model","page":"EvoTreeClassifier","title":"Training model","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"A model is built using fit_evotree:","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"model = fit_evotree(config; x_train, y_train, kwargs...)","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#Inference","page":"EvoTreeClassifier","title":"Inference","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"Predictions are obtained using predict, which returns a Matrix of size [nobs, K] where K is the number of classes:","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"EvoTrees.predict(model, X)","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"Alternatively, models act as functors, returning predictions when called as a function with features as argument:","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"model(X)","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#MLJ","page":"EvoTreeClassifier","title":"MLJ","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"From MLJ, the type can be imported 
using:","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"EvoTreeClassifier = @load EvoTreeClassifier pkg=EvoTrees","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"Do model = EvoTreeClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeClassifier(loss=...).","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#Training-data","page":"EvoTreeClassifier","title":"Training data","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"where","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:Multiclas or <:OrderedFactor; check the scitype with scitype(y)","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#Operations","page":"EvoTreeClassifier","title":"Operations","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. 
Predictions are probabilistic.\npredict_mode(mach, Xnew): returns the mode of each of the predictions above.","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#Fitted-parameters","page":"EvoTreeClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":":fitresult: The GBTree object returned by EvoTrees.jl fitting algorithm.","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#Report","page":"EvoTreeClassifier","title":"Report","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"The fields of report(mach) are:","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":":features: The names of the features encountered in training.","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/#Examples","page":"EvoTreeClassifier","title":"Examples","text":"","category":"section"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"## Internal API\nusing EvoTrees\nconfig = EvoTreeClassifier(max_depth=5, nbins=32, nrounds=100)\nnobs, nfeats = 1_000, 5\nx_train, y_train = randn(nobs, nfeats), rand(1:3, nobs)\nmodel = fit_evotree(config; x_train, y_train)\npreds = EvoTrees.predict(model, x_train)","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"## MLJ Interface\nusing MLJ\nEvoTreeClassifier = @load EvoTreeClassifier pkg=EvoTrees\nmodel = EvoTreeClassifier(max_depth=5, nbins=32, nrounds=100)\nX, y = @load_iris\nmach = machine(model, X, y) |> fit!\npreds = predict(mach, X)\npreds = predict_mode(mach, X)","category":"page"},{"location":"models/EvoTreeClassifier_EvoTrees/","page":"EvoTreeClassifier","title":"EvoTreeClassifier","text":"See also EvoTrees.jl.","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/#FactorAnalysis_MultivariateStats","page":"FactorAnalysis","title":"FactorAnalysis","text":"","category":"section"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"FactorAnalysis","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"A model type for constructing a factor analysis model, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"FactorAnalysis = @load FactorAnalysis pkg=MultivariateStats","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"Do model = FactorAnalysis() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in FactorAnalysis(method=...).","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"Factor analysis is a linear-Gaussian latent variable model that is closely related to probabilistic PCA. In contrast to the probabilistic PCA model, the covariance of the conditional distribution of the observed variable given the latent variable is diagonal rather than isotropic.","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/#Training-data","page":"FactorAnalysis","title":"Training data","text":"","category":"section"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"mach = machine(model, X)","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"Here:","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/#Hyper-parameters","page":"FactorAnalysis","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"method::Symbol=:cm: Method to use to solve the problem, one of :ml, :em, :bayes.\nmaxoutdim=0: Controls the dimension (number of columns) of the output, outdim. Specifically, outdim = min(n, indim, maxoutdim), where n is the number of observations and indim the input dimension.\nmaxiter::Int=1000: Maximum number of iterations.\ntol::Real=1e-6: Convergence tolerance.\neta::Real=tol: Variance lower bound.\nmean::Union{Nothing, Real, Vector{Float64}}=nothing: If nothing, centering will be computed and applied; if set to 0 no centering is applied (data is assumed pre-centered); if a vector, the centering is done with that vector.","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/#Operations","page":"FactorAnalysis","title":"Operations","text":"","category":"section"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.\ninverse_transform(mach, Xsmall): For a dimension-reduced table Xsmall, such as returned by transform, reconstruct a table, having the same number of columns as the original training data X, that transforms to Xsmall. Mathematically, inverse_transform is a right-inverse for the PCA projection map, whose image is orthogonal to the kernel of that map. 
In particular, if Xsmall = transform(mach, Xnew), then inverse_transform(Xsmall) is only an approximation to Xnew.","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/#Fitted-parameters","page":"FactorAnalysis","title":"Fitted parameters","text":"","category":"section"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"projection: Returns the projection matrix, which has size (indim, outdim), where indim and outdim are the number of features of the input and output, respectively. Each column of the projection matrix corresponds to a factor.","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/#Report","page":"FactorAnalysis","title":"Report","text":"","category":"section"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"The fields of report(mach) are:","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"indim: Dimension (number of columns) of the training data and new data to be transformed.\noutdim: Dimension of transformed data (number of factors).\nvariance: The variance of the factors.\ncovariance_matrix: The estimated covariance matrix.\nmean: The mean of the untransformed training data, of length indim.\nloadings: The factor loadings. A matrix of size (indim, outdim) where indim and outdim are as defined above.","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/#Examples","page":"FactorAnalysis","title":"Examples","text":"","category":"section"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"using MLJ\n\nFactorAnalysis = @load FactorAnalysis pkg=MultivariateStats\n\nX, y = @load_iris ## a table and a vector\n\nmodel = FactorAnalysis(maxoutdim=2)\nmach = machine(model, X) |> fit!\n\nXproj = transform(mach, X)","category":"page"},{"location":"models/FactorAnalysis_MultivariateStats/","page":"FactorAnalysis","title":"FactorAnalysis","text":"See also KernelPCA, ICA, PPCA, PCA","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/#SRRegressor_SymbolicRegression","page":"SRRegressor","title":"SRRegressor","text":"","category":"section"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"SRRegressor","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"A model type for constructing a Symbolic Regression via Evolutionary Search, based on SymbolicRegression.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"SRRegressor = @load SRRegressor pkg=SymbolicRegression","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"Do model = SRRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in SRRegressor(binary_operators=...).","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"Single-target Symbolic Regression regressor (SRRegressor) searches for symbolic expressions that predict a single target variable from a set of input variables. All data is assumed to be Continuous. The search is performed using an evolutionary algorithm. This algorithm is described in the paper https://arxiv.org/abs/2305.01582.","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/#Training-data","page":"SRRegressor","title":"Training data","text":"","category":"section"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"OR","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"mach = machine(model, X, y, w)","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"Here:","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). Variable names in discovered expressions will be taken from the column names of X, if available. Units in columns of X (use DynamicQuantities for units) will trigger dimensional analysis to be used.\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y). Units in y (use DynamicQuantities for units) will trigger dimensional analysis to be used.\nw is the observation weights, which can either be nothing (default) or an AbstractVector whose element scitype is Count or Continuous.","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"Train the machine using fit!(mach), inspect the discovered expressions with report(mach), and predict on new data with predict(mach, Xnew). Note that unlike other regressors, symbolic regression stores a list of trained models. The model chosen from this list is defined by the function selection_method keyword argument, which by default balances accuracy and complexity.","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/#Hyper-parameters","page":"SRRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"binary_operators: Vector of binary operators (functions) to use. Each operator should be defined for two input scalars, and one output scalar. All operators need to be defined over the entire real line (excluding infinity - these are stopped before they are input), or return NaN where not defined. For speed, define it so it takes two reals of the same type as input, and outputs the same type. 
For the SymbolicUtils simplification backend, you will need to define a generic method of the operator so it takes arbitrary types.\nunary_operators: Same, but for unary operators (one input scalar, gives an output scalar).\nconstraints: Array of pairs specifying size constraints for each operator. The constraints for a binary operator should be a 2-tuple (e.g., (-1, -1)) and the constraints for a unary operator should be an Int. A size constraint is a limit to the size of the subtree in each argument of an operator. e.g., [(^)=>(-1, 3)] means that the ^ operator can have arbitrary size (-1) in its left argument, but a maximum size of 3 in its right argument. Default is no constraints.\nbatching: Whether to evolve based on small mini-batches of data, rather than the entire dataset.\nbatch_size: What batch size to use if using batching.\nelementwise_loss: What elementwise loss function to use. Can be one of the following losses, or any other loss of type SupervisedLoss. You can also pass a function that takes a scalar target (left argument), and scalar predicted (right argument), and returns a scalar. This will be averaged over the predicted data. If weights are supplied, your function should take a third argument for the weight scalar. Included losses: Regression: - LPDistLoss{P}(), - L1DistLoss(), - L2DistLoss() (mean square), - LogitDistLoss(), - HuberLoss(d), - L1EpsilonInsLoss(ϵ), - L2EpsilonInsLoss(ϵ), - PeriodicLoss(c), - QuantileLoss(τ), Classification: - ZeroOneLoss(), - PerceptronLoss(), - L1HingeLoss(), - SmoothedL1HingeLoss(γ), - ModifiedHuberLoss(), - L2MarginLoss(), - ExpLoss(), - SigmoidLoss(), - DWDMarginLoss(q).\nloss_function: Alternatively, you may redefine the loss used as any function of tree::Node{T}, dataset::Dataset{T}, and options::Options, so long as you output a non-negative scalar of type T. This is useful if you want to use a loss that takes into account derivatives, or correlations across the dataset. This also means you could use a custom evaluation for a particular expression. If you are using batching=true, then your function should accept a fourth argument idx, which is either nothing (indicating that the full dataset should be used), or a vector of indices to use for the batch. For example,\n function my_loss(tree, dataset::Dataset{T,L}, options)::L where {T,L}\n prediction, flag = eval_tree_array(tree, dataset.X, options)\n if !flag\n return L(Inf)\n end\n return sum((prediction .- dataset.y) .^ 2) / dataset.n\n end\npopulations: How many populations of equations to use.\npopulation_size: How many equations in each population.\nncycles_per_iteration: How many generations to consider per iteration.\ntournament_selection_n: Number of expressions considered in each tournament.\ntournament_selection_p: The fittest expression in a tournament is to be selected with probability p, the next fittest with probability p*(1-p), and so forth.\ntopn: Number of equations to return to the host process, and to consider for the hall of fame.\ncomplexity_of_operators: What complexity should be assigned to each operator, and the occurrence of a constant or variable. By default, this is 1 for all operators. Can be a real number as well, in which case the complexity of an expression will be rounded to the nearest integer. Input this in the form of, e.g., [(^) => 3, sin => 2].\ncomplexity_of_constants: What complexity should be assigned to use of a constant. By default, this is 1.\ncomplexity_of_variables: What complexity should be assigned to each variable. 
By default, this is 1.\nalpha: The probability of accepting an equation mutation during regularized evolution is given by exp(-delta_loss/(alpha * T)), where T goes from 1 to 0. Thus, alpha=infinite is the same as no annealing.\nmaxsize: Maximum size of equations during the search.\nmaxdepth: Maximum depth of equations during the search, by default this is set equal to the maxsize.\nparsimony: A multiplicative factor for how much complexity is punished.\ndimensional_constraint_penalty: An additive factor if the dimensional constraint is violated.\nuse_frequency: Whether to use a parsimony that adapts to the relative proportion of equations at each complexity; this will ensure that there are a balanced number of equations considered for every complexity.\nuse_frequency_in_tournament: Whether to use the adaptive parsimony described above inside the score, rather than just at the mutation accept/reject stage.\nadaptive_parsimony_scaling: How much to scale the adaptive parsimony term in the loss. Increase this if the search is spending too much time optimizing the most complex equations.\nturbo: Whether to use LoopVectorization.@turbo to evaluate expressions. This can be significantly faster, but is only compatible with certain operators. Experimental!\nmigration: Whether to migrate equations between processes.\nhof_migration: Whether to migrate equations from the hall of fame to processes.\nfraction_replaced: What fraction of each population to replace with migrated equations at the end of each cycle.\nfraction_replaced_hof: What fraction to replace with hall of fame equations at the end of each cycle.\nshould_simplify: Whether to simplify equations. If you pass a custom objective, this will be set to false.\nshould_optimize_constants: Whether to use an optimization algorithm to periodically optimize constants in equations.\noptimizer_nrestarts: How many different random starting positions to consider for optimization of constants.\noptimizer_algorithm: Select algorithm to use for optimizing constants. Default is \"BFGS\", but \"NelderMead\" is also supported.\noptimizer_options: General options for the constant optimization. For details we refer to the documentation on Optim.Options from the Optim.jl package. Options can be provided here as NamedTuple, e.g. (iterations=16,), as a Dict, e.g. Dict(:x_tol => 1.0e-32,), or as an Optim.Options instance.\noutput_file: What file to store equations to, as a backup.\nperturbation_factor: When mutating a constant, either multiply or divide by (1+perturbation_factor)^(rand()+1).\nprobability_negate_constant: Probability of negating a constant in the equation when mutating it.\nmutation_weights: Relative probabilities of the mutations. The struct MutationWeights should be passed to these options. See its documentation on MutationWeights for the different weights.\ncrossover_probability: Probability of performing crossover.\nannealing: Whether to use simulated annealing.\nwarmup_maxsize_by: Whether to slowly increase the max size from 5 up to maxsize. If nonzero, specifies the fraction through the search at which the maxsize should be reached.\nverbosity: Whether to print debugging statements or not.\nprint_precision: How many digits to print when printing equations. By default, this is 5.\nsave_to_file: Whether to save equations to a file during the search.\nbin_constraints: See constraints. 
This is the same, but specified for binary operators only (for example, if you have an operator that is both a binary and unary operator).\nuna_constraints: Likewise, for unary operators.\nseed: What random seed to use. nothing uses no seed.\nprogress: Whether to use a progress bar output (verbosity will have no effect).\nearly_stop_condition: Float - whether to stop early if the mean loss gets below this value. Function - a function taking (loss, complexity) as arguments and returning true or false.\ntimeout_in_seconds: Float64 - the time in seconds after which to exit (as an alternative to the number of iterations).\nmax_evals: Int (or Nothing) - the maximum number of evaluations of expressions to perform.\nskip_mutation_failures: Whether to simply skip over mutations that fail or are rejected, rather than to replace the mutated expression with the original expression and proceed normally.\nenable_autodiff: Whether to enable automatic differentiation functionality. This is turned off by default. If turned on, this will be turned off if one of the operators does not have well-defined gradients.\nnested_constraints: Specifies how many times a combination of operators can be nested. For example, [sin => [cos => 0], cos => [cos => 2]] specifies that cos may never appear within a sin, but sin can be nested with itself an unlimited number of times. The second term specifies that cos can be nested up to 2 times within a cos, so that cos(cos(cos(x))) is allowed (as well as any combination of + or - within it), but cos(cos(cos(cos(x)))) is not allowed. When an operator is not specified, it is assumed that it can be nested an unlimited number of times. This requires that there is no operator which is used both in the unary operators and the binary operators (e.g., - could be both subtract, and negation). For binary operators, both arguments are treated the same way, and the max of each argument is constrained.\ndeterministic: Use a global counter for the birth time, rather than calls to time(). This gives perfect resolution, and is therefore deterministic. However, it is not thread safe, and must be used in serial mode.\ndefine_helper_functions: Whether to define helper functions for constructing and evaluating trees.\nniterations::Int=10: The number of iterations to perform the search. More iterations will improve the results.\nparallelism=:multithreading: What parallelism mode to use. The options are :multithreading, :multiprocessing, and :serial. By default, multithreading will be used. Multithreading uses less memory, but multiprocessing can handle multi-node compute. If using :multithreading mode, the number of threads available to julia are used. If using :multiprocessing, numprocs processes will be created dynamically if procs is unset. If you have already allocated processes, pass them to the procs argument and they will be used. You may also pass a string instead of a symbol, like \"multithreading\".\nnumprocs::Union{Int, Nothing}=nothing: The number of processes to use, if you want equation_search to set this up automatically. 
By default this will be 4, but can be any number (you should pick a number <= the number of cores available).\nprocs::Union{Vector{Int}, Nothing}=nothing: If you have set up a distributed run manually with procs = addprocs() and @everywhere, pass the procs to this keyword argument.\naddprocs_function::Union{Function, Nothing}=nothing: If using multiprocessing (parallelism=:multiprocessing), and you are not passing procs manually, then they will be allocated dynamically using addprocs. However, you may also pass a custom function to use instead of addprocs. This function should take a single positional argument, which is the number of processes to use, as well as the lazy keyword argument. For example, if set up on a slurm cluster, you could pass addprocs_function = addprocs_slurm, which will set up slurm processes.\nheap_size_hint_in_bytes::Union{Int,Nothing}=nothing: On Julia 1.9+, you may set the --heap-size-hint flag on Julia processes, recommending garbage collection once a process is close to the recommended size. This is important for long-running distributed jobs where each process has an independent memory, and can help avoid out-of-memory errors. By default, this is set to Sys.free_memory() / numprocs.\nruntests::Bool=true: Whether to run (quick) tests before starting the search, to see if there will be any problems during the equation search related to the host environment.\nloss_type::Type=Nothing: If you would like to use a different type for the loss than for the data you passed, specify the type here. Note that if you pass complex data ::Complex{L}, then the loss type will automatically be set to L.\nselection_method::Function: Function to select the expression from the Pareto frontier for use in predict. See SymbolicRegression.MLJInterfaceModule.choose_best for an example. This function should return a single integer specifying the index of the expression to use. By default, choose_best maximizes the score (a pound-for-pound rating) of expressions reaching the threshold of 1.5x the minimum loss. To fix the index at 5, you could just write Returns(5).\ndimensions_type::AbstractDimensions: The type of dimensions to use when storing the units of the data. By default this is DynamicQuantities.SymbolicDimensions.","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/#Operations","page":"SRRegressor","title":"Operations","text":"","category":"section"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"predict(mach, Xnew): Return predictions of the target given features Xnew, which should have the same scitype as X above. The expression used for prediction is defined by the selection_method function, which can be seen by viewing report(mach).best_idx.","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/#Fitted-parameters","page":"SRRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"best_idx::Int: The index of the best expression in the Pareto frontier, as determined by the selection_method function.\nequations::Vector{Node{T}}: The expressions discovered by the search, represented in a dominating Pareto frontier (i.e., the best expressions found for each complexity). 
T is equal to the element type of the passed data.\nequation_strings::Vector{String}: The expressions discovered by the search, represented as strings for easy inspection.","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/#Report","page":"SRRegressor","title":"Report","text":"","category":"section"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"The fields of report(mach) are:","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"best_idx::Int: The index of the best expression in the Pareto frontier, as determined by the selection_method function.\nequations::Vector{Node{T}}: The expressions discovered by the search, represented in a dominating Pareto frontier (i.e., the best expressions found for each complexity).\nequation_strings::Vector{String}: The expressions discovered by the search, represented as strings for easy inspection.\ncomplexities::Vector{Int}: The complexity of each expression in the Pareto frontier.\nlosses::Vector{L}: The loss of each expression in the Pareto frontier, according to the loss function specified in the model. The type L is the loss type, which is usually the same as the element type of data passed (i.e., T), but can differ if complex data types are passed.\nscores::Vector{L}: A metric which considers both the complexity and loss of an expression, equal to the change in the log-loss divided by the change in complexity, relative to the previous expression along the Pareto frontier. A larger score aims to indicate an expression is more likely to be the true expression generating the data, but this is very problem-dependent and generally several other factors should be considered.","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/#Examples","page":"SRRegressor","title":"Examples","text":"","category":"section"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"using MLJ\nSRRegressor = @load SRRegressor pkg=SymbolicRegression\nX, y = @load_boston\nmodel = SRRegressor(binary_operators=[+, -, *], unary_operators=[exp], niterations=100)\nmach = machine(model, X, y)\nfit!(mach)\ny_hat = predict(mach, X)\n## View the equation used:\nr = report(mach)\nprintln(\"Equation used:\", r.equation_strings[r.best_idx])","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"With units and variable names:","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"using MLJ\nusing DynamicQuantities\nSRegressor = @load SRRegressor pkg=SymbolicRegression\n\nX = (; x1=rand(32) .* us\"km/h\", x2=rand(32) .* us\"km\")\ny = @. 
X.x2 / X.x1 + 0.5us\"h\"\nmodel = SRRegressor(binary_operators=[+, -, *, /])\nmach = machine(model, X, y)\nfit!(mach)\ny_hat = predict(mach, X)\n## View the equation used:\nr = report(mach)\nprintln(\"Equation used:\", r.equation_strings[r.best_idx])","category":"page"},{"location":"models/SRRegressor_SymbolicRegression/","page":"SRRegressor","title":"SRRegressor","text":"See also MultitargetSRRegressor.","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#EvoTreeGaussian_EvoTrees","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"EvoTreeGaussian(;kwargs...)","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"A model type for constructing an EvoTreeGaussian, based on EvoTrees.jl, and implementing both an internal API and the MLJ model interface. EvoTreeGaussian is used to perform Gaussian probabilistic regression, fitting μ and σ parameters to maximize likelihood.","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#Hyper-parameters","page":"EvoTreeGaussian","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"nrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.\neta=0.1: Learning rate. Each tree's raw predictions are scaled by eta prior to being added to the stack of predictions. Must be > 0. A lower eta results in slower learning, requiring a higher nrounds, but typically improves model performance.\nL2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.\nlambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. Higher lambda can result in a more robust model.\ngamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model. Must be >= 0.\nmax_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.\nmin_weight=8.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.\nrowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be in ]0, 1].\ncolsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be in ]0, 1].\nnbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. Should be between 2 and 255.\nmonotone_constraints=Dict{Int, Int}(): Specify monotonic constraints using a dict where the key is the feature index and the value the applicable constraint (-1=decreasing, 0=none, 1=increasing). !Experimental feature: note that for Gaussian regression, constraints may not be enforced systematically.\ntree_type=\"binary\": Tree structure to be used. One of:\nbinary: Each node of a tree is grown independently. 
Trees are built depthwise until the maximum depth is reached, or until the minimum weight or gain (see gamma) stops further node splits.\noblivious: A common splitting condition is imposed on all nodes of a given depth.\nrng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#Internal-API","page":"EvoTreeGaussian","title":"Internal API","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"Do config = EvoTreeGaussian() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeGaussian(max_depth=...).","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#Training-model","page":"EvoTreeGaussian","title":"Training model","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"A model is built using fit_evotree:","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"model = fit_evotree(config; x_train, y_train, kwargs...)","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#Inference","page":"EvoTreeGaussian","title":"Inference","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"Predictions are obtained using predict, which returns a Matrix of size [nobs, 2] whose two columns refer to μ and σ, respectively:","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"EvoTrees.predict(model, X)","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"Alternatively, models act as a functor, returning predictions when called as a function with features as argument:","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"model(X)","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#MLJ","page":"EvoTreeGaussian","title":"MLJ","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"From MLJ, the type can be imported using:","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"EvoTreeGaussian = @load EvoTreeGaussian pkg=EvoTrees","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"Do model = EvoTreeGaussian() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeGaussian(loss=...).","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#Training-data","page":"EvoTreeGaussian","title":"Training data","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"where","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#Operations","page":"EvoTreeGaussian","title":"Operations","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"predict(mach, Xnew): returns a vector of Gaussian distributions given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"Predictions are probabilistic.","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"Specific metrics can also be predicted using:","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"predict_mean(mach, Xnew)\npredict_mode(mach, Xnew)\npredict_median(mach, Xnew)","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#Fitted-parameters","page":"EvoTreeGaussian","title":"Fitted parameters","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":":fitresult: The GBTree object returned by EvoTrees.jl fitting algorithm.","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#Report","page":"EvoTreeGaussian","title":"Report","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"The fields of report(mach) are:","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":":features: The names of the features encountered in training.","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/#Examples","page":"EvoTreeGaussian","title":"Examples","text":"","category":"section"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"## Internal API\nusing EvoTrees\nparams = EvoTreeGaussian(max_depth=5, 
nbins=32, nrounds=100)\nnobs, nfeats = 1_000, 5\nx_train, y_train = randn(nobs, nfeats), rand(nobs)\nmodel = fit_evotree(params; x_train, y_train)\npreds = EvoTrees.predict(model, x_train)","category":"page"},{"location":"models/EvoTreeGaussian_EvoTrees/","page":"EvoTreeGaussian","title":"EvoTreeGaussian","text":"## MLJ Interface\nusing MLJ\nEvoTreeGaussian = @load EvoTreeGaussian pkg=EvoTrees\nmodel = EvoTreeGaussian(max_depth=5, nbins=32, nrounds=100)\nX, y = @load_boston\nmach = machine(model, X, y) |> fit!\npreds = predict(mach, X)\npreds = predict_mean(mach, X)\npreds = predict_mode(mach, X)\npreds = predict_median(mach, X)","category":"page"},{"location":"models/GaussianMixtureImputer_BetaML/#GaussianMixtureImputer_BetaML","page":"GaussianMixtureImputer","title":"GaussianMixtureImputer","text":"","category":"section"},{"location":"models/GaussianMixtureImputer_BetaML/","page":"GaussianMixtureImputer","title":"GaussianMixtureImputer","text":"mutable struct GaussianMixtureImputer <: MLJModelInterface.Unsupervised","category":"page"},{"location":"models/GaussianMixtureImputer_BetaML/","page":"GaussianMixtureImputer","title":"GaussianMixtureImputer","text":"Impute missing values using a probabilistic approach (Gaussian Mixture Models) fitted using the Expectation-Maximisation algorithm, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/GaussianMixtureImputer_BetaML/#Hyperparameters:","page":"GaussianMixtureImputer","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/GaussianMixtureImputer_BetaML/","page":"GaussianMixtureImputer","title":"GaussianMixtureImputer","text":"n_classes::Int64: Number of mixtures (latent classes) to consider [def: 3]\ninitial_probmixtures::Vector{Float64}: Initial probabilities of the categorical distribution (n_classes x 1) [default: []]\nmixtures::Union{Type, Vector{<:BetaML.GMM.AbstractMixture}}: An array (of length n_classes) of the mixtures to employ (see the GMM module in BetaML). Each mixture object can be provided with or without its parameters (e.g. mean and variance for the gaussian ones). Fully qualified mixtures are useful only if the initialisation_strategy parameter is set to \"given\". This parameter can also be given simply in terms of a type; in that case it is automatically extended to a vector of n_classes mixtures of the specified type. Note that mixing of different mixture types is not currently supported, and that the currently implemented mixtures are SphericalGaussian, DiagonalGaussian and FullGaussian. [def: DiagonalGaussian]\ntol::Float64: Tolerance to stop the algorithm [default: 10^(-6)]\nminimum_variance::Float64: Minimum variance for the mixtures [default: 0.05]\nminimum_covariance::Float64: Minimum covariance for the mixtures with full covariance matrix [default: 0]. This should be set to a value different from minimum_variance.\ninitialisation_strategy::String: The computation method of the vector of the initial mixtures. 
One of the following:\n\"grid\": using a grid approach\n\"given\": using the mixture provided in the fully qualified mixtures parameter\n\"kmeans\": use first kmeans (itself initialised with a \"grid\" strategy) to set the initial mixture centers [default]\nNote that currently \"random\" and \"shuffle\" initialisations are not supported in gmm-based algorithms.\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/GaussianMixtureImputer_BetaML/#Example-:","page":"GaussianMixtureImputer","title":"Example :","text":"","category":"section"},{"location":"models/GaussianMixtureImputer_BetaML/","page":"GaussianMixtureImputer","title":"GaussianMixtureImputer","text":"julia> using MLJ\n\njulia> X = [1 10.5;1.5 missing; 1.8 8; 1.7 15; 3.2 40; missing missing; 3.3 38; missing -2.3; 5.2 -2.4] |> table ;\n\njulia> modelType = @load GaussianMixtureImputer pkg = \"BetaML\" verbosity=0\nBetaML.Imputation.GaussianMixtureImputer\n\njulia> model = modelType(initialisation_strategy=\"grid\")\nGaussianMixtureImputer(\n n_classes = 3, \n initial_probmixtures = Float64[], \n mixtures = BetaML.GMM.DiagonalGaussian{Float64}[BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing)], \n tol = 1.0e-6, \n minimum_variance = 0.05, \n minimum_covariance = 0.0, \n initialisation_strategy = \"grid\", \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X);\n\njulia> fit!(mach);\n[ Info: Training machine(GaussianMixtureImputer(n_classes = 3, …), …).\nIter. 1: Var. of the post 2.0225921341714286 Log-likelihood -42.96100103213314\n\njulia> X_full = transform(mach) |> MLJ.matrix\n9×2 Matrix{Float64}:\n 1.0 10.5\n 1.5 14.7366\n 1.8 8.0\n 1.7 15.0\n 3.2 40.0\n 2.51842 15.1747\n 3.3 38.0\n 2.47412 -2.3\n 5.2 -2.4","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/#HuberRegressor_MLJLinearModels","page":"HuberRegressor","title":"HuberRegressor","text":"","category":"section"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"HuberRegressor","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"A model type for constructing a huber regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"HuberRegressor = @load HuberRegressor pkg=MLJLinearModels","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"Do model = HuberRegressor() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"This model coincides with RobustRegressor, with the exception that the robust loss, rho, is fixed to HuberRho(delta), where delta is a new hyperparameter.","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"Different solver options exist, as indicated under \"Hyperparameters\" 
below. ","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/#Training-data","page":"HuberRegressor","title":"Training data","text":"","category":"section"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"where:","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/#Hyperparameters","page":"HuberRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"delta::Real: parameterizes the HuberRho function (radius of the ball within which the loss is a quadratic loss) Default: 0.5\nlambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. Default: 1.0\ngamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0\npenalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2\nfit_intercept::Bool: whether to fit the intercept or not. Default: true\npenalize_intercept::Bool: whether to penalize the intercept. Default: false\nscale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, IWLSCG, Newton, NewtonCG, if penalty = :l2, and ProxGrad otherwise.\nIf solver = nothing (default) then LBFGS() is used, if penalty = :l2, and otherwise ProxGrad(accel=true) (FISTA) is used.\nSolver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) 
Default: nothing","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/#Example","page":"HuberRegressor","title":"Example","text":"","category":"section"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"using MLJ\nX, y = make_regression()\nmach = fit!(machine(HuberRegressor(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"},{"location":"models/HuberRegressor_MLJLinearModels/","page":"HuberRegressor","title":"HuberRegressor","text":"See also RobustRegressor, QuantileRegressor.","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/#UnivariateTimeTypeToContinuous_MLJModels","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"","category":"section"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"UnivariateTimeTypeToContinuous","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"A model type for constructing a single variable transformer that creates continuous representations of temporally typed data, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"UnivariateTimeTypeToContinuous = @load UnivariateTimeTypeToContinuous pkg=MLJModels","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"Do model = UnivariateTimeTypeToContinuous() to construct an instance with default hyper-parameters. 
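For instance, here is a minimal end-to-end sketch (this sketch is not part of the upstream docstring; the zero_time, step and data values are purely illustrative, and a fuller worked example with Date data appears at the end of this entry):\n\nusing MLJ\nusing Dates\n\nmodel = UnivariateTimeTypeToContinuous(zero_time=DateTime(2020, 1, 1), step=Hour(1))\nx = [DateTime(2020, 1, 1) + Hour(i) for i in 0:3]\nmach = machine(model, x)\nfit!(mach)\ntransform(mach, x) ## should return approximately [0.0, 1.0, 2.0, 3.0]\n\n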
Provide keyword arguments to override hyper-parameter defaults, as in UnivariateTimeTypeToContinuous(zero_time=...).","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"Use this model to convert vectors with a TimeType element type to vectors of Float64 type (Continuous element scitype).","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/#Training-data","page":"UnivariateTimeTypeToContinuous","title":"Training data","text":"","category":"section"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"mach = machine(model, x)","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"where","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"x: any abstract vector whose element type is a subtype of Dates.TimeType","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/#Hyper-parameters","page":"UnivariateTimeTypeToContinuous","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"zero_time: the time that is to correspond to 0.0 under transformations, with the type coinciding with the training data element type. 
If unspecified, the earliest time encountered in training is used.\nstep::Period=Hour(24): time interval to correspond to one unit under transformation","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/#Operations","page":"UnivariateTimeTypeToContinuous","title":"Operations","text":"","category":"section"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"transform(mach, xnew): apply the encoding inferred when mach was fit","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/#Fitted-parameters","page":"UnivariateTimeTypeToContinuous","title":"Fitted parameters","text":"","category":"section"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"fitted_params(mach).fitresult is the tuple (zero_time, step) actually used in transformations, which may differ from the user-specified hyper-parameters.","category":"page"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/#Example","page":"UnivariateTimeTypeToContinuous","title":"Example","text":"","category":"section"},{"location":"models/UnivariateTimeTypeToContinuous_MLJModels/","page":"UnivariateTimeTypeToContinuous","title":"UnivariateTimeTypeToContinuous","text":"using MLJ\nusing Dates\n\nx = [Date(2001, 1, 1) + Day(i) for i in 0:4]\n\nencoder = UnivariateTimeTypeToContinuous(zero_time=Date(2000, 1, 1),\n step=Week(1))\n\nmach = machine(encoder, x)\nfit!(mach)\njulia> transform(mach, x)\n5-element Vector{Float64}:\n 52.285714285714285\n 52.42857142857143\n 52.57142857142857\n 52.714285714285715\n 52.857142","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/#AdaBoostStumpClassifier_DecisionTree","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"","category":"section"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"AdaBoostStumpClassifier","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"A model type for constructing a Ada-boosted stump classifier, based on DecisionTree.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"AdaBoostStumpClassifier = @load AdaBoostStumpClassifier pkg=DecisionTree","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"Do model = AdaBoostStumpClassifier() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in AdaBoostStumpClassifier(n_iter=...).","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/#Training-data","page":"AdaBoostStumpClassifier","title":"Training data","text":"","category":"section"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"where:","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/#Hyperparameters","page":"AdaBoostStumpClassifier","title":"Hyperparameters","text":"","category":"section"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"n_iter=10: number of iterations of AdaBoost\nfeature_importance: method to use for computing feature importances. One of (:impurity, :split)\nrng=Random.GLOBAL_RNG: random number generator or seed","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/#Operations","page":"AdaBoostStumpClassifier","title":"Operations","text":"","category":"section"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. 
Predictions are probabilistic, but uncalibrated.\npredict_mode(mach, Xnew): instead return the mode of each prediction above.","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/#Fitted-Parameters","page":"AdaBoostStumpClassifier","title":"Fitted Parameters","text":"","category":"section"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"stumps: the Ensemble object returned by the core DecisionTree.jl algorithm.\ncoefficients: the stump coefficients (one per stump)","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/#Report","page":"AdaBoostStumpClassifier","title":"Report","text":"","category":"section"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"features: the names of the features encountered in training","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/#Accessor-functions","page":"AdaBoostStumpClassifier","title":"Accessor functions","text":"","category":"section"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/#Examples","page":"AdaBoostStumpClassifier","title":"Examples","text":"","category":"section"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"using MLJ\nBooster = @load AdaBoostStumpClassifier pkg=DecisionTree\nbooster = Booster(n_iter=15)\n\nX, y = @load_iris\nmach = machine(booster, X, y) |> fit!\n\nXnew = (sepal_length = [6.4, 7.2, 7.4],\n sepal_width = [2.8, 3.0, 2.8],\n petal_length = [5.6, 5.8, 6.1],\n petal_width = [2.1, 1.6, 1.9],)\nyhat = predict(mach, Xnew) ## probabilistic predictions\npredict_mode(mach, Xnew) ## point predictions\npdf.(yhat, \"virginica\") ## probabilities for the \"verginica\" class\n\nfitted_params(mach).stumps ## raw `Ensemble` object from DecisionTree.jl\nfitted_params(mach).coefs ## coefficient associated with each stump\nfeature_importances(mach)","category":"page"},{"location":"models/AdaBoostStumpClassifier_DecisionTree/","page":"AdaBoostStumpClassifier","title":"AdaBoostStumpClassifier","text":"See also DecisionTree.jl and the unwrapped model type MLJDecisionTreeInterface.DecisionTree.AdaBoostStumpClassifier.","category":"page"},{"location":"models/CBLOFDetector_OutlierDetectionPython/#CBLOFDetector_OutlierDetectionPython","page":"CBLOFDetector","title":"CBLOFDetector","text":"","category":"section"},{"location":"models/CBLOFDetector_OutlierDetectionPython/","page":"CBLOFDetector","title":"CBLOFDetector","text":"CBLOFDetector(n_clusters = 8,\n alpha = 0.9,\n beta = 5,\n use_weights = false,\n random_state = nothing,\n n_jobs = 
1)","category":"page"},{"location":"models/CBLOFDetector_OutlierDetectionPython/","page":"CBLOFDetector","title":"CBLOFDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.cblof","category":"page"},{"location":"models/LassoLarsRegressor_MLJScikitLearnInterface/#LassoLarsRegressor_MLJScikitLearnInterface","page":"LassoLarsRegressor","title":"LassoLarsRegressor","text":"","category":"section"},{"location":"models/LassoLarsRegressor_MLJScikitLearnInterface/","page":"LassoLarsRegressor","title":"LassoLarsRegressor","text":"LassoLarsRegressor","category":"page"},{"location":"models/LassoLarsRegressor_MLJScikitLearnInterface/","page":"LassoLarsRegressor","title":"LassoLarsRegressor","text":"A model type for constructing a Lasso model fit with least angle regression (LARS), based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LassoLarsRegressor_MLJScikitLearnInterface/","page":"LassoLarsRegressor","title":"LassoLarsRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LassoLarsRegressor_MLJScikitLearnInterface/","page":"LassoLarsRegressor","title":"LassoLarsRegressor","text":"LassoLarsRegressor = @load LassoLarsRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LassoLarsRegressor_MLJScikitLearnInterface/","page":"LassoLarsRegressor","title":"LassoLarsRegressor","text":"Do model = LassoLarsRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LassoLarsRegressor(alpha=...).","category":"page"},{"location":"models/LassoLarsRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"LassoLarsRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LassoLarsRegressor_MLJScikitLearnInterface/","page":"LassoLarsRegressor","title":"LassoLarsRegressor","text":"alpha = 1.0\nfit_intercept = true\nverbose = false\nnormalize = false\nprecompute = auto\nmax_iter = 500\neps = 2.220446049250313e-16\ncopy_X = true\nfit_path = true\npositive = false","category":"page"},{"location":"models/TSVDTransformer_TSVD/#TSVDTransformer_TSVD","page":"TSVDTransformer","title":"TSVDTransformer","text":"","category":"section"},{"location":"models/TSVDTransformer_TSVD/","page":"TSVDTransformer","title":"TSVDTransformer","text":"Truncated SVD dimensionality reduction","category":"page"},{"location":"models/COFDetector_OutlierDetectionPython/#COFDetector_OutlierDetectionPython","page":"COFDetector","title":"COFDetector","text":"","category":"section"},{"location":"models/COFDetector_OutlierDetectionPython/","page":"COFDetector","title":"COFDetector","text":"COFDetector(n_neighbors = 5,\n method=\"fast\")","category":"page"},{"location":"models/COFDetector_OutlierDetectionPython/","page":"COFDetector","title":"COFDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.cof","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/#ProbabilisticSVC_LIBSVM","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"ProbabilisticSVC","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"A model type for constructing a probabilistic C-support vector classifier, based on LIBSVM.jl, and implementing the 
MLJ model interface.","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"ProbabilisticSVC = @load ProbabilisticSVC pkg=LIBSVM","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"Do model = ProbabilisticSVC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ProbabilisticSVC(kernel=...).","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"This model is identical to SVC with the exception that it predicts probabilities, instead of actual class labels. Probabilities are computed using Platt scaling, which will add to the total computation time.","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): \"LIBSVM: a library for support vector machines.\" ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf. ","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"Platt, John (1999): \"Probabilistic Outputs for Support Vector Machines and Comparisons to Regularized Likelihood Methods.\"","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/#Training-data","page":"ProbabilisticSVC","title":"Training data","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"In MLJ or MLJBase, bind an instance model to data with one of:","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"mach = machine(model, X, y)\nmach = machine(model, X, y, w)","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"where","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)\nw: a dictionary of class weights, keyed on levels(y).","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/#Hyper-parameters","page":"ProbabilisticSVC","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. 
Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see \"Examples\" below).\nLIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2\nLIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree\nLIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))\nLIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)\nHere gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91.\ngamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. The actual value used appears in the report (see below).\ncoef0 = 0.0: kernel parameter (see above)\ndegree::Int32 = Int32(3): degree in polynomial kernel (see above)\ncost=1.0 (range (0, Inf)): the parameter denoted C in the cited reference; for greater regularization, decrease cost\ncachesize=200.0: cache memory size in MB\ntolerance=0.001: tolerance for the stopping criterion\nshrinking=true: whether to use shrinking heuristics","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/#Operations","page":"ProbabilisticSVC","title":"Operations","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"predict(mach, Xnew): return probabilistic predictions of the target given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/#Fitted-parameters","page":"ProbabilisticSVC","title":"Fitted parameters","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"libsvm_model: the trained model object created by the LIBSVM.jl package\nencoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/#Report","page":"ProbabilisticSVC","title":"Report","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"The fields of report(mach) are:","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"gamma: actual value of the kernel parameter gamma used in training","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/#Examples","page":"ProbabilisticSVC","title":"Examples","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/#Using-a-built-in-kernel","page":"ProbabilisticSVC","title":"Using a built-in kernel","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"using MLJ\nimport LIBSVM\n\nProbabilisticSVC = @load ProbabilisticSVC pkg=LIBSVM ## model type\nmodel = ProbabilisticSVC(kernel=LIBSVM.Kernel.Polynomial) ## instance\n\nX, y = @load_iris ## table, vector\nmach = machine(model, X, y) |> fit!\n\nXnew = (sepal_length = [6.4, 7.2, 7.4],\n sepal_width = [2.8, 3.0, 2.8],\n petal_length = [5.6, 5.8, 6.1],\n petal_width = [2.1, 1.6, 1.9],)\n\njulia> probs = predict(mach, 
Xnew)\n3-element UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:\n UnivariateFinite{Multiclass{3}}(setosa=>0.00186, versicolor=>0.003, virginica=>0.995)\n UnivariateFinite{Multiclass{3}}(setosa=>0.000563, versicolor=>0.0554, virginica=>0.944)\n UnivariateFinite{Multiclass{3}}(setosa=>1.4e-6, versicolor=>1.68e-6, virginica=>1.0)\n\n\njulia> labels = mode.(probs)\n3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:\n \"virginica\"\n \"virginica\"\n \"virginica\"","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/#User-defined-kernels","page":"ProbabilisticSVC","title":"User-defined kernels","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`\nmodel = ProbabilisticSVC(kernel=k)\nmach = machine(model, X, y) |> fit!\n\nprobs = predict(mach, Xnew)","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/#Incorporating-class-weights","page":"ProbabilisticSVC","title":"Incorporating class weights","text":"","category":"section"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"In either scenario above, we can do:","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"weights = Dict(\"virginica\" => 1, \"versicolor\" => 20, \"setosa\" => 1)\nmach = machine(model, X, y, weights) |> fit!\n\nprobs = predict(mach, Xnew)","category":"page"},{"location":"models/ProbabilisticSVC_LIBSVM/","page":"ProbabilisticSVC","title":"ProbabilisticSVC","text":"See also the classifiers SVC, NuSVC and LinearSVC, and LIVSVM.jl and the original C implementation documentation.","category":"page"},{"location":"models/LogisticClassifier_MLJScikitLearnInterface/#LogisticClassifier_MLJScikitLearnInterface","page":"LogisticClassifier","title":"LogisticClassifier","text":"","category":"section"},{"location":"models/LogisticClassifier_MLJScikitLearnInterface/","page":"LogisticClassifier","title":"LogisticClassifier","text":"LogisticClassifier","category":"page"},{"location":"models/LogisticClassifier_MLJScikitLearnInterface/","page":"LogisticClassifier","title":"LogisticClassifier","text":"A model type for constructing a logistic regression classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LogisticClassifier_MLJScikitLearnInterface/","page":"LogisticClassifier","title":"LogisticClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LogisticClassifier_MLJScikitLearnInterface/","page":"LogisticClassifier","title":"LogisticClassifier","text":"LogisticClassifier = @load LogisticClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LogisticClassifier_MLJScikitLearnInterface/","page":"LogisticClassifier","title":"LogisticClassifier","text":"Do model = LogisticClassifier() to construct an instance with default hyper-parameters. 
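For example, here is a minimal workflow sketch (this sketch is not part of the upstream docstring; it assumes the scikit-learn Python backend used by MLJScikitLearnInterface is available, and the value of C is purely illustrative):\n\nusing MLJ\nLogisticClassifier = @load LogisticClassifier pkg=MLJScikitLearnInterface\nmodel = LogisticClassifier(C=0.5) ## illustrative regularization strength\nX, y = @load_iris\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X) ## probabilistic predictions\npredict_mode(mach, X) ## point predictions\n\n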
Provide keyword arguments to override hyper-parameter defaults, as in LogisticClassifier(penalty=...).","category":"page"},{"location":"models/LogisticClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"LogisticClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LogisticClassifier_MLJScikitLearnInterface/","page":"LogisticClassifier","title":"LogisticClassifier","text":"penalty = l2\ndual = false\ntol = 0.0001\nC = 1.0\nfit_intercept = true\nintercept_scaling = 1.0\nclass_weight = nothing\nrandom_state = nothing\nsolver = lbfgs\nmax_iter = 100\nmulti_class = auto\nverbose = 0\nwarm_start = false\nn_jobs = nothing\nl1_ratio = nothing","category":"page"},{"location":"models/ImageClassifier_MLJFlux/#ImageClassifier_MLJFlux","page":"ImageClassifier","title":"ImageClassifier","text":"","category":"section"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"ImageClassifier","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"A model type for constructing a image classifier, based on MLJFlux.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"ImageClassifier = @load ImageClassifier pkg=MLJFlux","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"Do model = ImageClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ImageClassifier(builder=...).","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"ImageClassifier classifies images using a neural network adapted to the type of images provided (color or gray scale). Predictions are probabilistic. Users provide a recipe for constructing the network, based on properties of the image encountered, by specifying an appropriate builder. 
See MLJFlux documentation for more on builders.","category":"page"},{"location":"models/ImageClassifier_MLJFlux/#Training-data","page":"ImageClassifier","title":"Training data","text":"","category":"section"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"Here:","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"X is any AbstractVector of images with ColorImage or GrayImage scitype; check the scitype with scitype(X) and refer to ScientificTypes.jl documentation on coercing typical image formats into an appropriate type.\ny is the target, which can be any AbstractVector whose element scitype is Multiclass; check the scitype with scitype(y).","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/ImageClassifier_MLJFlux/#Hyper-parameters","page":"ImageClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"builder: An MLJFlux builder that constructs the neural network. The fallback builds a depth-16 VGG architecture adapted to the image size and number of target classes, with no batch normalization; see the Metalhead.jl documentation for details. See the example below for a user-specified builder. A convenience macro @builder is also available. See also finaliser below.\noptimiser=Flux.Adam(): A Flux.Optimise optimiser. The optimiser performs the updating of the weights of the network. For further reference, see the Flux optimiser documentation. To choose a learning rate (the update rate of the optimizer), a good rule of thumb is to start out at 10e-3, and tune using powers of 10 between 1 and 1e-7.\nloss=Flux.crossentropy: The loss function which the network will optimize. Should be a function which can be called in the form loss(yhat, y). Possible loss functions are listed in the Flux loss function documentation. For a classification task, the most natural loss functions are:\nFlux.crossentropy: Standard multiclass classification loss, also known as the log loss.\nFlux.logitcrossentropy: Mathematically equal to crossentropy, but numerically more stable than finalising the outputs with softmax and then calculating crossentropy. You will need to specify finaliser=identity to remove MLJFlux's default softmax finaliser, and understand that the output of predict is then unnormalized (no longer probabilistic).\nFlux.tversky_loss: Used with imbalanced data to give more weight to false negatives.\nFlux.focal_loss: Used with highly imbalanced data. Weights harder examples more than easier examples.\nCurrently, MLJ measures are not supported as values of loss.\nepochs::Int=10: The duration of training, in epochs. Typically, one epoch represents one pass through the complete training dataset.\nbatch_size::Int=1: the batch size to be used for training, representing the number of samples per update of the network weights. 
Typically, batch size is between 8 and 512. Increasing batch size may accelerate training if acceleration=CUDALibs() and a GPU is available.\nlambda::Float64=0: The strength of the weight regularization penalty. Can be any value in the range [0, ∞).\nalpha::Float64=0: The L2/L1 mix of regularization, in the range [0, 1]. A value of 0 represents L2 regularization, and a value of 1 represents L1 regularization.\nrng::Union{AbstractRNG, Int64}: The random number generator or seed used during training.\noptimizer_changes_trigger_retraining::Bool=false: Defines what happens when re-fitting a machine if the associated optimiser has changed. If true, the associated machine will retrain from scratch on fit! call, otherwise it will not.\nacceleration::AbstractResource=CPU1(): Defines on what hardware training is done. For training on a GPU, use CUDALibs().\nfinaliser=Flux.softmax: The final activation function of the neural network (applied after the network defined by builder). Defaults to Flux.softmax.","category":"page"},{"location":"models/ImageClassifier_MLJFlux/#Operations","page":"ImageClassifier","title":"Operations","text":"","category":"section"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above. Predictions are probabilistic but uncalibrated.\npredict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.","category":"page"},{"location":"models/ImageClassifier_MLJFlux/#Fitted-parameters","page":"ImageClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"chain: The trained \"chain\" (Flux.jl model), namely the series of layers, functions, and activations which make up the neural network. This includes the final layer specified by finaliser (eg, softmax).","category":"page"},{"location":"models/ImageClassifier_MLJFlux/#Report","page":"ImageClassifier","title":"Report","text":"","category":"section"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"The fields of report(mach) are:","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"training_losses: A vector of training losses (penalised if lambda != 0) in historical order, of length epochs + 1. 
The first element is the pre-training loss.","category":"page"},{"location":"models/ImageClassifier_MLJFlux/#Examples","page":"ImageClassifier","title":"Examples","text":"","category":"section"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"In this example we use MLJFlux and a custom builder to classify the MNIST image dataset.","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"using MLJ\nusing Flux\nimport MLJFlux\nimport MLJIteration ## for `skip` control","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"First we want to download the MNIST dataset, and unpack into images and labels:","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"import MLDatasets: MNIST\ndata = MNIST(split=:train)\nimages, labels = data.features, data.targets","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"In MLJ, integers cannot be used for encoding categorical data, so we must coerce them into the Multiclass scitype:","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"labels = coerce(labels, Multiclass);","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"Above images is a single array but MLJFlux requires the images to be a vector of individual image arrays:","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"images = coerce(images, GrayImage);\nimages[1]","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"We start by defining a suitable builder object. This is a recipe for building the neural network. Our builder will work for images of any (constant) size, whether they be color or black and white (ie, single or multi-channel). The architecture always consists of six alternating convolution and max-pool layers, and a final dense layer; the filter size and the number of channels after each convolution layer is customizable.","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"import MLJFlux\n\nstruct MyConvBuilder\n filter_size::Int\n channels1::Int\n channels2::Int\n channels3::Int\nend\n\nmake2d(x::AbstractArray) = reshape(x, :, size(x)[end])\n\nfunction MLJFlux.build(b::MyConvBuilder, rng, n_in, n_out, n_channels)\n k, c1, c2, c3 = b.filter_size, b.channels1, b.channels2, b.channels3\n mod(k, 2) == 1 || error(\"`filter_size` must be odd. \")\n p = div(k - 1, 2) ## padding to preserve image size\n init = Flux.glorot_uniform(rng)\n front = Chain(\n Conv((k, k), n_channels => c1, pad=(p, p), relu, init=init),\n MaxPool((2, 2)),\n Conv((k, k), c1 => c2, pad=(p, p), relu, init=init),\n MaxPool((2, 2)),\n Conv((k, k), c2 => c3, pad=(p, p), relu, init=init),\n MaxPool((2 ,2)),\n make2d)\n d = Flux.outputsize(front, (n_in..., n_channels, 1)) |> first\n return Chain(front, Dense(d, n_out, init=init))\nend","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"It is important to note that in our build function, there is no final softmax. 
This is applied by default in all MLJFlux classifiers (override this using the finaliser hyperparameter).","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"Now that our builder is defined, we can instantiate the actual MLJFlux model. If you have a GPU, you can substitute in acceleration=CUDALibs() below to speed up training.","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"ImageClassifier = @load ImageClassifier pkg=MLJFlux\nclf = ImageClassifier(builder=MyConvBuilder(3, 16, 32, 32),\n batch_size=50,\n epochs=10,\n rng=123)","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"You can add Flux options such as optimiser and loss in the snippet above. Currently, loss must be a flux-compatible loss, and not an MLJ measure.","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"Next, we can bind the model with the data in a machine, and train using the first 500 images:","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"mach = machine(clf, images, labels);\nfit!(mach, rows=1:500, verbosity=2);\nreport(mach)\nchain = fitted_params(mach)\nFlux.params(chain)[2]","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"We can tack on 20 more epochs by modifying the epochs field, and iteratively fit some more:","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"clf.epochs = clf.epochs + 20\nfit!(mach, rows=1:500, verbosity=2);","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"We can also make predictions and calculate an out-of-sample loss estimate, using any MLJ measure (loss/score):","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"predicted_labels = predict(mach, rows=501:1000);\ncross_entropy(predicted_labels, labels[501:1000]) |> mean","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"The preceding fit!/predict/evaluate workflow can be alternatively executed as follows:","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"evaluate!(mach,\n resampling=Holdout(fraction_train=0.5),\n measure=cross_entropy,\n rows=1:1000,\n verbosity=0)","category":"page"},{"location":"models/ImageClassifier_MLJFlux/","page":"ImageClassifier","title":"ImageClassifier","text":"See also NeuralNetworkClassifier.","category":"page"},{"location":"models/DeterministicConstantRegressor_MLJModels/#DeterministicConstantRegressor_MLJModels","page":"DeterministicConstantRegressor","title":"DeterministicConstantRegressor","text":"","category":"section"},{"location":"models/DeterministicConstantRegressor_MLJModels/","page":"DeterministicConstantRegressor","title":"DeterministicConstantRegressor","text":"DeterministicConstantRegressor","category":"page"},{"location":"models/DeterministicConstantRegressor_MLJModels/","page":"DeterministicConstantRegressor","title":"DeterministicConstantRegressor","text":"A model type for constructing a 
deterministic constant regressor, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/DeterministicConstantRegressor_MLJModels/","page":"DeterministicConstantRegressor","title":"DeterministicConstantRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/DeterministicConstantRegressor_MLJModels/","page":"DeterministicConstantRegressor","title":"DeterministicConstantRegressor","text":"DeterministicConstantRegressor = @load DeterministicConstantRegressor pkg=MLJModels","category":"page"},{"location":"models/DeterministicConstantRegressor_MLJModels/","page":"DeterministicConstantRegressor","title":"DeterministicConstantRegressor","text":"Do model = DeterministicConstantRegressor() to construct an instance with default hyper-parameters. ","category":"page"},{"location":"models/SMOTE_Imbalance/#SMOTE_Imbalance","page":"SMOTE","title":"SMOTE","text":"","category":"section"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"Initiate a SMOTE model with the given hyper-parameters.","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"SMOTE","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"A model type for constructing a smote, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"SMOTE = @load SMOTE pkg=Imbalance","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"Do model = SMOTE() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SMOTE(k=...).","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"SMOTE implements the SMOTE algorithm to correct for class imbalance as in N. V. Chawla, K. W. Bowyer, L. O.Hall, W. P. 
Kegelmeyer, “SMOTE: synthetic minority over-sampling technique,” Journal of artificial intelligence research, 321-357, 2002.","category":"page"},{"location":"models/SMOTE_Imbalance/#Training-data","page":"SMOTE","title":"Training data","text":"","category":"section"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"In MLJ or MLJBase, wrap the model in a machine by","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"mach = machine(model)","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"Likewise, there is no need to fit!(mach).","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"For default values of the hyper-parameters, model can be constructed by","category":"page"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"model = SMOTE()","category":"page"},{"location":"models/SMOTE_Imbalance/#Hyperparameters","page":"SMOTE","title":"Hyperparameters","text":"","category":"section"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"k=5: Number of nearest neighbors to consider in the SMOTE algorithm. Should be within the range [1, n - 1], where n is the number of observations; otherwise set to the nearest of these two values.\nratios=1.0: A parameter that controls the amount of oversampling to be done for each class\nCan be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class\nCan be a dictionary mapping each class label to the float ratio for that class\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister`.","category":"page"},{"location":"models/SMOTE_Imbalance/#Transform-Inputs","page":"SMOTE","title":"Transform Inputs","text":"","category":"section"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"X: A matrix or table of floats where each row is an observation from the dataset\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/SMOTE_Imbalance/#Transform-Outputs","page":"SMOTE","title":"Transform Outputs","text":"","category":"section"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"Xover: A matrix or table that includes original data and the new observations due to oversampling. 
depending on whether the input X is a matrix or table respectively\nyover: An abstract vector of labels corresponding to Xover","category":"page"},{"location":"models/SMOTE_Imbalance/#Operations","page":"SMOTE","title":"Operations","text":"","category":"section"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"transform(mach, X, y): resample the data X and y using SMOTE, returning both the new and original observations","category":"page"},{"location":"models/SMOTE_Imbalance/#Example","page":"SMOTE","title":"Example","text":"","category":"section"},{"location":"models/SMOTE_Imbalance/","page":"SMOTE","title":"SMOTE","text":"using MLJ\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows, num_continuous_feats = 100, 5\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n class_probs, rng=42) \n\njulia> Imbalance.checkbalance(y)\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) \n\n## load SMOTE\nSMOTE = @load SMOTE pkg=Imbalance\n\n## wrap the model in a machine\noversampler = SMOTE(k=5, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)\nmach = machine(oversampler)\n\n## provide the data to transform (there is nothing to fit)\nXover, yover = transform(mach, X, y)\n\njulia> Imbalance.checkbalance(yover)\n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) \n","category":"page"},{"location":"adding_models_for_general_use/#Adding-Models-for-General-Use","page":"Adding Models for General Use","title":"Adding Models for General Use","text":"","category":"section"},{"location":"adding_models_for_general_use/","page":"Adding Models for General Use","title":"Adding Models for General Use","text":"To write a complete MLJ model interface for new or existing machine learning models, suitable for addition to the MLJ Model Registry, consult the MLJModelInterface.jl documentation.","category":"page"},{"location":"adding_models_for_general_use/","page":"Adding Models for General Use","title":"Adding Models for General Use","text":"For quick-and-dirty user-defined models see Simple User Defined Models. ","category":"page"},{"location":"models/HDBSCAN_MLJScikitLearnInterface/#HDBSCAN_MLJScikitLearnInterface","page":"HDBSCAN","title":"HDBSCAN","text":"","category":"section"},{"location":"models/HDBSCAN_MLJScikitLearnInterface/","page":"HDBSCAN","title":"HDBSCAN","text":"HDBSCAN","category":"page"},{"location":"models/HDBSCAN_MLJScikitLearnInterface/","page":"HDBSCAN","title":"HDBSCAN","text":"A model type for constructing a hdbscan, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/HDBSCAN_MLJScikitLearnInterface/","page":"HDBSCAN","title":"HDBSCAN","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/HDBSCAN_MLJScikitLearnInterface/","page":"HDBSCAN","title":"HDBSCAN","text":"HDBSCAN = @load HDBSCAN pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/HDBSCAN_MLJScikitLearnInterface/","page":"HDBSCAN","title":"HDBSCAN","text":"Do model = HDBSCAN() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in HDBSCAN(min_cluster_size=...).","category":"page"},{"location":"models/HDBSCAN_MLJScikitLearnInterface/","page":"HDBSCAN","title":"HDBSCAN","text":"Hierarchical Density-Based Spatial Clustering of Applications with Noise. Performs DBSCAN over varying epsilon values and integrates the result to find a clustering that gives the best stability over epsilon. This allows HDBSCAN to find clusters of varying densities (unlike DBSCAN), and be more robust to parameter selection. ","category":"page"},{"location":"models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/#MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface","page":"MultiTaskElasticNetCVRegressor","title":"MultiTaskElasticNetCVRegressor","text":"","category":"section"},{"location":"models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetCVRegressor","title":"MultiTaskElasticNetCVRegressor","text":"MultiTaskElasticNetCVRegressor","category":"page"},{"location":"models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetCVRegressor","title":"MultiTaskElasticNetCVRegressor","text":"A model type for constructing a multi-target elastic net regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetCVRegressor","title":"MultiTaskElasticNetCVRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetCVRegressor","title":"MultiTaskElasticNetCVRegressor","text":"MultiTaskElasticNetCVRegressor = @load MultiTaskElasticNetCVRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetCVRegressor","title":"MultiTaskElasticNetCVRegressor","text":"Do model = MultiTaskElasticNetCVRegressor() to construct an instance with default hyper-parameters. 
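As an illustrative sketch only (not part of the upstream docstring; it assumes scikit-learn is available via MLJScikitLearnInterface and uses MLJ's synthetic data generator make_regression):\n\nusing MLJ\nMultiTaskElasticNetCVRegressor = @load MultiTaskElasticNetCVRegressor pkg=MLJScikitLearnInterface\nmodel = MultiTaskElasticNetCVRegressor(cv=3)  ## cv is a documented hyper-parameter (default 5)\nX, y = make_regression(100, 5; n_targets=2)  ## table of Continuous features, table of two Continuous targets\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X)  ## predictions for both targets\n\n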
Provide keyword arguments to override hyper-parameter defaults, as in MultiTaskElasticNetCVRegressor(l1_ratio=...).","category":"page"},{"location":"models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"MultiTaskElasticNetCVRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MultiTaskElasticNetCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskElasticNetCVRegressor","title":"MultiTaskElasticNetCVRegressor","text":"l1_ratio = 0.5\neps = 0.001\nn_alphas = 100\nalphas = nothing\nfit_intercept = true\nmax_iter = 1000\ntol = 0.0001\ncv = 5\ncopy_X = true\nverbose = 0\nn_jobs = nothing\nrandom_state = nothing\nselection = cyclic","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/#XGBoostRegressor_XGBoost","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"","category":"section"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"XGBoostRegressor","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"A model type for constructing a eXtreme Gradient Boosting Regressor, based on XGBoost.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"XGBoostRegressor = @load XGBoostRegressor pkg=XGBoost","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"Do model = XGBoostRegressor() to construct an instance with default hyper-parameters. 
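For illustration, a minimal end-to-end sketch (not taken from the upstream docstring; it uses synthetic data from MLJ's make_regression):\n\nusing MLJ\nXGBoostRegressor = @load XGBoostRegressor pkg=XGBoost\nmodel = XGBoostRegressor()\nX, y = make_regression(200, 3)  ## Continuous features and a Continuous target\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X)  ## deterministic predictions\n\n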
Provide keyword arguments to override hyper-parameter defaults, as in XGBoostRegressor(test=...).","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"Univariate continuous regression using xgboost.","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/#Training-data","page":"XGBoostRegressor","title":"Training data","text":"","category":"section"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"m = machine(model, X, y)","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"where","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"X: any table of input features whose columns have Continuous element scitype; check column scitypes with schema(X).\ny: is an AbstractVector target with Continuous elements; check the scitype with scitype(y).","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"Train using fit!(m, rows=...).","category":"page"},{"location":"models/XGBoostRegressor_XGBoost/#Hyper-parameters","page":"XGBoostRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/XGBoostRegressor_XGBoost/","page":"XGBoostRegressor","title":"XGBoostRegressor","text":"See https://xgboost.readthedocs.io/en/stable/parameter.html.","category":"page"},{"location":"models/LinearCountRegressor_GLM/#LinearCountRegressor_GLM","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"","category":"section"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"LinearCountRegressor","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"A model type for constructing a linear count regressor, based on GLM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"LinearCountRegressor = @load LinearCountRegressor pkg=GLM","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"Do model = LinearCountRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearCountRegressor(fit_intercept=...).","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"LinearCountRegressor is a generalized linear model, specialised to the case of a Count target variable (non-negative, unbounded integer) with user-specified link function. 
Options exist to specify an intercept or offset feature.","category":"page"},{"location":"models/LinearCountRegressor_GLM/#Training-data","page":"LinearCountRegressor","title":"Training data","text":"","category":"section"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"In MLJ or MLJBase, bind an instance model to data with one of:","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"mach = machine(model, X, y)\nmach = machine(model, X, y, w)","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"Here","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"X: is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the scitype with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is Count; check the scitype with schema(y)\nw: is a vector of Real per-observation weights","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LinearCountRegressor_GLM/#Hyper-parameters","page":"LinearCountRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"fit_intercept=true: Whether to calculate the intercept for this model. If set to false, no intercept will be calculated (e.g. the data is expected to be centered)\ndistribution=Distributions.Poisson(): The distribution which the residuals/errors of the model should fit.\nlink=GLM.LogLink(): The function which links the linear prediction function to the probability of a particular outcome or class. This should be one of the following: GLM.IdentityLink(), GLM.InverseLink(), GLM.InverseSquareLink(), GLM.LogLink(), GLM.SqrtLink().\noffsetcol=nothing: Name of the column to be used as an offset, if any. An offset is a variable which is known to have a coefficient of 1.\nmaxiter::Integer=30: The maximum number of iterations allowed to achieve convergence.\natol::Real=1e-6: Absolute threshold for convergence. Convergence is achieved when the relative change in deviance is less than `max(rtol*dev, atol). This term exists to avoid failure when deviance is unchanged except for rounding errors.\nrtol::Real=1e-6: Relative threshold for convergence. Convergence is achieved when the relative change in deviance is less than `max(rtol*dev, atol). This term exists to avoid failure when deviance is unchanged except for rounding errors.\nminstepfac::Real=0.001: Minimum step fraction. Must be between 0 and 1. Lower bound for the factor used to update the linear fit.\nreport_keys: Vector of keys for the report. Possible keys are: :deviance, :dof_residual, :stderror, :vcov, :coef_table and :glm_model. 
By default only :glm_model is excluded.","category":"page"},{"location":"models/LinearCountRegressor_GLM/#Operations","page":"LinearCountRegressor","title":"Operations","text":"","category":"section"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"predict(mach, Xnew): return predictions of the target given new features Xnew having the same Scitype as X above. Predictions are probabilistic.\npredict_mean(mach, Xnew): instead return the mean of each prediction above\npredict_median(mach, Xnew): instead return the median of each prediction above.","category":"page"},{"location":"models/LinearCountRegressor_GLM/#Fitted-parameters","page":"LinearCountRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"features: The names of the features encountered during model fitting.\ncoef: The linear coefficients determined by the model.\nintercept: The intercept determined by the model.","category":"page"},{"location":"models/LinearCountRegressor_GLM/#Report","page":"LinearCountRegressor","title":"Report","text":"","category":"section"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"The fields of report(mach) are:","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"deviance: Measure of deviance of fitted model with respect to a perfectly fitted model. For a linear model, this is the weighted residual sum of squares\ndof_residual: The degrees of freedom for residuals, when meaningful.\nstderror: The standard errors of the coefficients.\nvcov: The estimated variance-covariance matrix of the coefficient estimates.\ncoef_table: Table which displays coefficients and summarizes their significance and confidence intervals.\nglm_model: The raw fitted model returned by GLM.lm. Note this points to training data. 
Refer to the GLM.jl documentation for usage.","category":"page"},{"location":"models/LinearCountRegressor_GLM/#Examples","page":"LinearCountRegressor","title":"Examples","text":"","category":"section"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"using MLJ\nimport MLJ.Distributions.Poisson\n\n## Generate some data whose target y looks Poisson when conditioned on\n## X:\nN = 10_000\nw = [1.0, -2.0, 3.0]\nmu(x) = exp(w'x) ## mean for a log link function\nXmat = rand(N, 3)\nX = MLJ.table(Xmat)\ny = map(1:N) do i\n x = Xmat[i, :]\n rand(Poisson(mu(x)))\nend;\n\nCountRegressor = @load LinearCountRegressor pkg=GLM\nmodel = CountRegressor(fit_intercept=false)\nmach = machine(model, X, y)\nfit!(mach)\n\nXnew = MLJ.table(rand(3, 3))\nyhat = predict(mach, Xnew)\nyhat_point = predict_mean(mach, Xnew)\n\n## get coefficients approximating `w`:\njulia> fitted_params(mach).coef\n3-element Vector{Float64}:\n 0.9969008753103842\n -2.0255901752504775\n 3.014407534033522\n\nreport(mach)","category":"page"},{"location":"models/LinearCountRegressor_GLM/","page":"LinearCountRegressor","title":"LinearCountRegressor","text":"See also LinearRegressor, LinearBinaryClassifier","category":"page"},{"location":"models/ElasticNetCVRegressor_MLJScikitLearnInterface/#ElasticNetCVRegressor_MLJScikitLearnInterface","page":"ElasticNetCVRegressor","title":"ElasticNetCVRegressor","text":"","category":"section"},{"location":"models/ElasticNetCVRegressor_MLJScikitLearnInterface/","page":"ElasticNetCVRegressor","title":"ElasticNetCVRegressor","text":"ElasticNetCVRegressor","category":"page"},{"location":"models/ElasticNetCVRegressor_MLJScikitLearnInterface/","page":"ElasticNetCVRegressor","title":"ElasticNetCVRegressor","text":"A model type for constructing a elastic net regression with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ElasticNetCVRegressor_MLJScikitLearnInterface/","page":"ElasticNetCVRegressor","title":"ElasticNetCVRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ElasticNetCVRegressor_MLJScikitLearnInterface/","page":"ElasticNetCVRegressor","title":"ElasticNetCVRegressor","text":"ElasticNetCVRegressor = @load ElasticNetCVRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/ElasticNetCVRegressor_MLJScikitLearnInterface/","page":"ElasticNetCVRegressor","title":"ElasticNetCVRegressor","text":"Do model = ElasticNetCVRegressor() to construct an instance with default hyper-parameters. 
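For illustration, a minimal sketch (not from the upstream docstring; assumes scikit-learn is available through MLJScikitLearnInterface and uses MLJ's make_regression for synthetic data):\n\nusing MLJ\nElasticNetCVRegressor = @load ElasticNetCVRegressor pkg=MLJScikitLearnInterface\nmodel = ElasticNetCVRegressor(l1_ratio=0.7)  ## l1_ratio is a documented hyper-parameter\nX, y = make_regression(100, 3)  ## synthetic Continuous features and target\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X)\n\n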
Provide keyword arguments to override hyper-parameter defaults, as in ElasticNetCVRegressor(l1_ratio=...).","category":"page"},{"location":"models/ElasticNetCVRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"ElasticNetCVRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ElasticNetCVRegressor_MLJScikitLearnInterface/","page":"ElasticNetCVRegressor","title":"ElasticNetCVRegressor","text":"l1_ratio = 0.5\neps = 0.001\nn_alphas = 100\nalphas = nothing\nfit_intercept = true\nprecompute = auto\nmax_iter = 1000\ntol = 0.0001\ncv = 5\ncopy_X = true\nverbose = 0\nn_jobs = nothing\npositive = false\nrandom_state = nothing\nselection = cyclic","category":"page"},{"location":"models/NeuralNetworkRegressor_BetaML/#NeuralNetworkRegressor_BetaML","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_BetaML/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"mutable struct NeuralNetworkRegressor <: MLJModelInterface.Deterministic","category":"page"},{"location":"models/NeuralNetworkRegressor_BetaML/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"A simple but flexible Feedforward Neural Network, from the Beta Machine Learning Toolkit (BetaML) for regression of a single dimensional target.","category":"page"},{"location":"models/NeuralNetworkRegressor_BetaML/#Parameters:","page":"NeuralNetworkRegressor","title":"Parameters:","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_BetaML/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"layers: Array of layer objects [def: nothing, i.e. basic network]. See subtypes(BetaML.AbstractLayer) for supported layers\nloss: Loss (cost) function [def: BetaML.squared_cost]. Should always assume y and ŷ as matrices, even if the regression task is 1-D\nwarning: Warning\nIf you change the parameter loss, you need to either provide its derivative on the parameter dloss or use autodiff with dloss=nothing.\ndloss: Derivative of the loss function [def: BetaML.dsquared_cost, i.e. use the derivative of the squared cost]. Use nothing for autodiff.\nepochs: Number of epochs, i.e. passages trough the whole training sample [def: 200]\nbatch_size: Size of each individual batch [def: 16]\nopt_alg: The optimisation algorithm to update the gradient at each batch [def: BetaML.ADAM()]. 
See subtypes(BetaML.OptimisationAlgorithm) for supported optimizers\nshuffle: Whether to randomly shuffle the data at each iteration (epoch) [def: true]\ndescr: An optional title and/or description for this model\ncb: A call back function to provide information during training [def: fitting_info]\nrng: Random Number Generator (see FIXEDSEED) [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/NeuralNetworkRegressor_BetaML/#Notes:","page":"NeuralNetworkRegressor","title":"Notes:","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_BetaML/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"data must be numerical\nthe label should be be a n-records vector.","category":"page"},{"location":"models/NeuralNetworkRegressor_BetaML/#Example:","page":"NeuralNetworkRegressor","title":"Example:","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_BetaML/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"julia> using MLJ\n\njulia> X, y = @load_boston;\n\njulia> modelType = @load NeuralNetworkRegressor pkg = \"BetaML\" verbosity=0\nBetaML.Nn.NeuralNetworkRegressor\n\njulia> layers = [BetaML.DenseLayer(12,20,f=BetaML.relu),BetaML.DenseLayer(20,20,f=BetaML.relu),BetaML.DenseLayer(20,1,f=BetaML.relu)];\n\njulia> model = modelType(layers=layers,opt_alg=BetaML.ADAM());\nNeuralNetworkRegressor(\n layers = BetaML.Nn.AbstractLayer[BetaML.Nn.DenseLayer([-0.23249759178069676 -0.4125090172711131 … 0.41401934928739 -0.33017881111237535; -0.27912169279319965 0.270551221249931 … 0.19258414323473344 0.1703002982374256; … ; 0.31186742456482447 0.14776438287394805 … 0.3624993442655036 0.1438885872964824; 0.24363744610286758 -0.3221033024934767 … 0.14886090419299408 0.038411663101909355], [-0.42360286004241765, -0.34355377040029594, 0.11510963232946697, 0.29078650404397893, -0.04940236502546075, 0.05142849152316714, -0.177685375947775, 0.3857630523957018, -0.25454667127064756, -0.1726731848206195, 0.29832456225553444, -0.21138505291162835, -0.15763643112604903, -0.08477044513587562, -0.38436681165349196, 0.20538016429104916, -0.25008157754468335, 0.268681800562054, 0.10600581996650865, 0.4262194464325672], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.08534180387478185 0.19659398307677617 … -0.3413633217504578 -0.0484925247381256; 0.0024419192794883915 -0.14614102508129 … -0.21912059923003044 0.2680725396694708; … ; 0.25151545823147886 -0.27532269951606037 … 0.20739970895058063 0.2891938885916349; -0.1699020711688904 -0.1350423717084296 … 0.16947589410758873 0.3629006047373296], [0.2158116357688406, -0.3255582642532289, -0.057314442103850394, 0.29029696770539953, 0.24994080694366455, 0.3624239027782297, -0.30674318230919984, -0.3854738338935017, 0.10809721838554087, 0.16073511121016176, -0.005923262068960489, 0.3157147976348795, -0.10938918304264739, -0.24521229198853187, -0.307167732178712, 0.0808907777008302, -0.014577497150872254, -0.0011287181458157214, 0.07522282588658086, 0.043366500526073104], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.021367697115938555 -0.28326652172347155 … 0.05346175368370165 -0.26037328415871647], [-0.2313659199724562], BetaML.Utils.relu, BetaML.Utils.drelu)], \n loss = BetaML.Utils.squared_cost, \n dloss = BetaML.Utils.dsquared_cost, \n epochs = 100, \n batch_size = 32, \n opt_alg = BetaML.Nn.ADAM(BetaML.Nn.var\"#90#93\"(), 1.0, 0.9, 0.999, 1.0e-8, BetaML.Nn.Learnable[], BetaML.Nn.Learnable[]), \n shuffle = true, \n descr = \"\", \n cb = 
BetaML.Nn.fitting_info, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n\njulia> ŷ = predict(mach, X);\n\njulia> hcat(y,ŷ)\n506×2 Matrix{Float64}:\n 24.0 30.7726\n 21.6 28.0811\n 34.7 31.3194\n ⋮ \n 23.9 30.9032\n 22.0 29.49\n 11.9 27.2438","category":"page"},{"location":"learning_mlj/#Learning-MLJ","page":"Learning MLJ","title":"Learning MLJ","text":"","category":"section"},{"location":"learning_mlj/","page":"Learning MLJ","title":"Learning MLJ","text":"MLJ Cheatsheet","category":"page"},{"location":"learning_mlj/","page":"Learning MLJ","title":"Learning MLJ","text":"See also Getting help and reporting problems.","category":"page"},{"location":"learning_mlj/","page":"Learning MLJ","title":"Learning MLJ","text":"The present document, although littered with examples, is primarily intended as a complete reference. ","category":"page"},{"location":"learning_mlj/#Where-to-start?","page":"Learning MLJ","title":"Where to start?","text":"","category":"section"},{"location":"learning_mlj/#Completely-new-to-Julia?","page":"Learning MLJ","title":"Completely new to Julia?","text":"","category":"section"},{"location":"learning_mlj/","page":"Learning MLJ","title":"Learning MLJ","text":"Julia's learning resources page | Learn X in Y minutes | HelloJulia","category":"page"},{"location":"learning_mlj/#New-to-data-science?","page":"Learning MLJ","title":"New to data science?","text":"","category":"section"},{"location":"learning_mlj/","page":"Learning MLJ","title":"Learning MLJ","text":"Julia Data Science","category":"page"},{"location":"learning_mlj/#New-to-machine-learning?","page":"Learning MLJ","title":"New to machine learning?","text":"","category":"section"},{"location":"learning_mlj/","page":"Learning MLJ","title":"Learning MLJ","text":"Introduction to Statistical Learning with Julia versions of the R labs here","category":"page"},{"location":"learning_mlj/#Know-some-ML-and-just-want-MLJ-basics?","page":"Learning MLJ","title":"Know some ML and just want MLJ basics?","text":"","category":"section"},{"location":"learning_mlj/","page":"Learning MLJ","title":"Learning MLJ","text":"Getting Started | Common MLJ Workflows","category":"page"},{"location":"learning_mlj/#An-ML-practitioner-transitioning-from-another-platform?","page":"Learning MLJ","title":"An ML practitioner transitioning from another platform?","text":"","category":"section"},{"location":"learning_mlj/","page":"Learning MLJ","title":"Learning MLJ","text":"MLJ for Data Scientists in Two Hours | MLJTutorial","category":"page"},{"location":"learning_mlj/#Other-resources","page":"Learning MLJ","title":"Other resources","text":"","category":"section"},{"location":"learning_mlj/","page":"Learning MLJ","title":"Learning MLJ","text":"Data Science Tutorials: MLJ tutorials including end-to-end examples, and \"Introduction to Statistical Learning\" labs\nMLCourse: Teaching material for an introductory machine learning course at EPFL (for an interactive preview see here).\nJulia Boards the Titanic Blog post on using MLJ for users new to Julia. 
\nAnalyzing the Glass Dataset: A gentle introduction to data science using Julia and MLJ (three-part blog post)\nLightning Tour: A compressed demonstration of key MLJ functionality\nMLJ JuliaCon2020 Workshop: older version of MLJTutorial with video\nLearning Networks: For advanced MLJ users wanting to wrap workflows more complicated than linear pipelines\nMachine Learning Property Loans for Fun and Profit - Blog post demonstrating the use of MLJ to predict prospects for investment in property development loans. \nPredicting a Successful Mt Everest Climb - Blog post using MLJ to discover factors correlating with success in expeditions to climb the world's highest peak.","category":"page"},{"location":"models/LassoLarsICRegressor_MLJScikitLearnInterface/#LassoLarsICRegressor_MLJScikitLearnInterface","page":"LassoLarsICRegressor","title":"LassoLarsICRegressor","text":"","category":"section"},{"location":"models/LassoLarsICRegressor_MLJScikitLearnInterface/","page":"LassoLarsICRegressor","title":"LassoLarsICRegressor","text":"LassoLarsICRegressor","category":"page"},{"location":"models/LassoLarsICRegressor_MLJScikitLearnInterface/","page":"LassoLarsICRegressor","title":"LassoLarsICRegressor","text":"A model type for constructing a Lasso model with LARS using BIC or AIC for model selection, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LassoLarsICRegressor_MLJScikitLearnInterface/","page":"LassoLarsICRegressor","title":"LassoLarsICRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LassoLarsICRegressor_MLJScikitLearnInterface/","page":"LassoLarsICRegressor","title":"LassoLarsICRegressor","text":"LassoLarsICRegressor = @load LassoLarsICRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LassoLarsICRegressor_MLJScikitLearnInterface/","page":"LassoLarsICRegressor","title":"LassoLarsICRegressor","text":"Do model = LassoLarsICRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LassoLarsICRegressor(criterion=...).","category":"page"},{"location":"models/LassoLarsICRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"LassoLarsICRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LassoLarsICRegressor_MLJScikitLearnInterface/","page":"LassoLarsICRegressor","title":"LassoLarsICRegressor","text":"criterion = aic\nfit_intercept = true\nverbose = false\nnormalize = false\nprecompute = auto\nmax_iter = 500\neps = 2.220446049250313e-16\ncopy_X = true\npositive = false","category":"page"},{"location":"models/GaussianMixtureRegressor_BetaML/#GaussianMixtureRegressor_BetaML","page":"GaussianMixtureRegressor","title":"GaussianMixtureRegressor","text":"","category":"section"},{"location":"models/GaussianMixtureRegressor_BetaML/","page":"GaussianMixtureRegressor","title":"GaussianMixtureRegressor","text":"mutable struct GaussianMixtureRegressor <: MLJModelInterface.Deterministic","category":"page"},{"location":"models/GaussianMixtureRegressor_BetaML/","page":"GaussianMixtureRegressor","title":"GaussianMixtureRegressor","text":"A non-linear regressor derived from fitting the data on a probabilistic model (Gaussian Mixture Model). 
Relatively fast but generally not very precise, except for data with a structure matching the chosen underlying mixture.","category":"page"},{"location":"models/GaussianMixtureRegressor_BetaML/","page":"GaussianMixtureRegressor","title":"GaussianMixtureRegressor","text":"This is the single-target version of the model. If you want to predict several labels (y) at once, use the MLJ model MultitargetGaussianMixtureRegressor.","category":"page"},{"location":"models/GaussianMixtureRegressor_BetaML/#Hyperparameters:","page":"GaussianMixtureRegressor","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/GaussianMixtureRegressor_BetaML/","page":"GaussianMixtureRegressor","title":"GaussianMixtureRegressor","text":"n_classes::Int64: Number of mixtures (latent classes) to consider [def: 3]\ninitial_probmixtures::Vector{Float64}: Initial probabilities of the categorical distribution (n_classes x 1) [default: []]\nmixtures::Union{Type, Vector{<:BetaML.GMM.AbstractMixture}}: An array (of length n_classes) of the mixtures to employ (see the GMM module). Each mixture object can be provided with or without its parameters (e.g. mean and variance for the Gaussian ones). Fully qualified mixtures are useful only if the initialisation_strategy parameter is set to \"given\". This parameter can also be given simply in terms of a type; in this case it is automatically extended to a vector of n_classes mixtures of the specified type. Note that mixing of different mixture types is not currently supported. [def: [DiagonalGaussian() for i in 1:n_classes]]\ntol::Float64: Tolerance to stop the algorithm [default: 10^(-6)]\nminimum_variance::Float64: Minimum variance for the mixtures [default: 0.05]\nminimum_covariance::Float64: Minimum covariance for the mixtures with full covariance matrix [default: 0]. This should be set differently from minimum_variance (see notes).\ninitialisation_strategy::String: The computation method of the vector of the initial mixtures. One of the following:\n\"grid\": using a grid approach\n\"given\": using the mixture provided in the fully qualified mixtures parameter\n\"kmeans\": first use kmeans (itself initialised with a \"grid\" strategy) to set the initial mixture centers [default]\nNote that currently \"random\" and \"shuffle\" initialisations are not supported in gmm-based algorithms.\nmaximum_iterations::Int64: Maximum number of iterations [def: typemax(Int64), i.e. 
∞]\nrng::Random.AbstractRNG: Random Number Generator [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/GaussianMixtureRegressor_BetaML/#Example:","page":"GaussianMixtureRegressor","title":"Example:","text":"","category":"section"},{"location":"models/GaussianMixtureRegressor_BetaML/","page":"GaussianMixtureRegressor","title":"GaussianMixtureRegressor","text":"julia> using MLJ\n\njulia> X, y = @load_boston;\n\njulia> modelType = @load GaussianMixtureRegressor pkg = \"BetaML\" verbosity=0\nBetaML.GMM.GaussianMixtureRegressor\n\njulia> model = modelType()\nGaussianMixtureRegressor(\n n_classes = 3, \n initial_probmixtures = Float64[], \n mixtures = BetaML.GMM.DiagonalGaussian{Float64}[BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing)], \n tol = 1.0e-6, \n minimum_variance = 0.05, \n minimum_covariance = 0.0, \n initialisation_strategy = \"kmeans\", \n maximum_iterations = 9223372036854775807, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n[ Info: Training machine(GaussianMixtureRegressor(n_classes = 3, …), …).\nIter. 1: Var. of the post 21.74887448784976 Log-likelihood -21687.09917379566\n\njulia> ŷ = predict(mach, X)\n506-element Vector{Float64}:\n 24.703442835305577\n 24.70344283512716\n ⋮\n 17.172486989759676\n 17.172486989759644","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/#MultitargetNeuralNetworkRegressor_MLJFlux","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"MultitargetNeuralNetworkRegressor","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"A model type for constructing a multitarget neural network regressor, based on MLJFlux.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"MultitargetNeuralNetworkRegressor = @load MultitargetNeuralNetworkRegressor pkg=MLJFlux","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"Do model = MultitargetNeuralNetworkRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetNeuralNetworkRegressor(builder=...).","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"MultitargetNeuralNetworkRegressor is for training a data-dependent Flux.jl neural network to predict a multi-valued Continuous target, represented as a table, given a table of Continuous features. 
Users provide a recipe for constructing the network, based on properties of the data that is encountered, by specifying an appropriate builder. See MLJFlux documentation for more on builders.","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/#Training-data","page":"MultitargetNeuralNetworkRegressor","title":"Training data","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"Here:","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"X is either a Matrix or any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). If X is a Matrix, it is assumed to have columns corresponding to features and rows corresponding to observations.\ny is the target, which can be any table or matrix of output targets whose element scitype is Continuous; check column scitypes with schema(y). If y is a Matrix, it is assumed to have columns corresponding to variables and rows corresponding to observations.","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/#Hyper-parameters","page":"MultitargetNeuralNetworkRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"builder=MLJFlux.Linear(σ=Flux.relu): An MLJFlux builder that constructs a neural network. Possible builders include: Linear, Short, and MLP. See MLJFlux documentation for more on builders, and the example below for using the @builder convenience macro.\noptimiser::Flux.Adam(): A Flux.Optimise optimiser. The optimiser performs the updating of the weights of the network. For further reference, see the Flux optimiser documentation. To choose a learning rate (the update rate of the optimizer), a good rule of thumb is to start out at 10e-3, and tune using powers of 10 between 1 and 1e-7.\nloss=Flux.mse: The loss function which the network will optimize. Should be a function which can be called in the form loss(yhat, y). Possible loss functions are listed in the Flux loss function documentation. For a regression task, natural loss functions are:\nFlux.mse\nFlux.mae\nFlux.msle\nFlux.huber_loss\nCurrently MLJ measures are not supported as loss functions here.\nepochs::Int=10: The duration of training, in epochs. Typically, one epoch represents one pass through the complete training dataset.\nbatch_size::int=1: the batch size to be used for training, representing the number of samples per update of the network weights. Typically, batch size is between 8 and 512. Increasing batch size may accelerate training if acceleration=CUDALibs() and a GPU is available.\nlambda::Float64=0: The strength of the weight regularization penalty. 
Can be any value in the range [0, ∞).\nalpha::Float64=0: The L2/L1 mix of regularization, in the range [0, 1]. A value of 0 represents L2 regularization, and a value of 1 represents L1 regularization.\nrng::Union{AbstractRNG, Int64}: The random number generator or seed used during training.\noptimizer_changes_trigger_retraining::Bool=false: Defines what happens when re-fitting a machine if the associated optimiser has changed. If true, the associated machine will retrain from scratch on fit! call, otherwise it will not.\nacceleration::AbstractResource=CPU1(): Defines on what hardware training is done. For Training on GPU, use CUDALibs().","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/#Operations","page":"MultitargetNeuralNetworkRegressor","title":"Operations","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"predict(mach, Xnew): return predictions of the target given new features Xnew having the same scitype as X above. Predictions are deterministic.","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/#Fitted-parameters","page":"MultitargetNeuralNetworkRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"chain: The trained \"chain\" (Flux.jl model), namely the series of layers, functions, and activations which make up the neural network.","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/#Report","page":"MultitargetNeuralNetworkRegressor","title":"Report","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"The fields of report(mach) are:","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"training_losses: A vector of training losses (penalised if lambda != 0) in historical order, of length epochs + 1. 
The first element is the pre-training loss.","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/#Examples","page":"MultitargetNeuralNetworkRegressor","title":"Examples","text":"","category":"section"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"In this example we apply a multi-target regression model to synthetic data:","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"using MLJ\nimport MLJFlux\nusing Flux","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"First, we generate some synthetic data (needs MLJBase 0.20.16 or higher):","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"X, y = make_regression(100, 9; n_targets = 2) ## both tables\nschema(y)\nschema(X)","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"Splitting off a test set:","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"(X, Xtest), (y, ytest) = partition((X, y), 0.7, multi=true);","category":"page"},{"location":"models/MultitargetNeuralNetworkRegressor_MLJFlux/","page":"MultitargetNeuralNetworkRegressor","title":"MultitargetNeuralNetworkRegressor","text":"Next, we can define a builder, making use of a convenience macro to do so. In the following @builder call, n_in is a proxy for the number input features and n_out the number of target variables (both known at fit! 
time), while rng is a proxy for an RNG (which will be passed from the rng field of the model defined below).\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"builder = MLJFlux.@builder begin\n init=Flux.glorot_uniform(rng)\n Chain(\n Dense(n_in, 64, relu, init=init),\n Dense(64, 32, relu, init=init),\n Dense(32, n_out, init=init),\n )\nend\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"Instantiating the regression model:\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"MultitargetNeuralNetworkRegressor = @load MultitargetNeuralNetworkRegressor\nmodel = MultitargetNeuralNetworkRegressor(builder=builder, rng=123, epochs=20)\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"We will arrange for standardization of the target by wrapping our model in TransformedTargetModel, and standardization of the features by inserting the wrapped model in a pipeline:\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"pipe = Standardizer |> TransformedTargetModel(model, target=Standardizer)\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"If we fit with a high verbosity (>1), we will see the losses during training. 
We can also see the losses in the output of report(mach).\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"mach = machine(pipe, X, y)\nfit!(mach, verbosity=2)\n\n## first element initial loss, 2:end per epoch training losses\nreport(mach).transformed_target_model_deterministic.model.training_losses\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"For experimenting with learning rate, see the NeuralNetworkRegressor example.\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"pipe.transformed_target_model_deterministic.model.optimiser.eta = 0.0001\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"With the learning rate fixed, we can now compute a CV estimate of the performance (using all data bound to mach) and compare this with performance on the test set:\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"## custom MLJ loss:\nmulti_loss(yhat, y) = l2(MLJ.matrix(yhat), MLJ.matrix(y)) |> mean\n\n## CV estimate, based on `(X, y)`:\nevaluate!(mach, resampling=CV(nfolds=5), measure=multi_loss)\n\n## loss for `(Xtest, ytest)`:\nfit!(mach) ## trains on all data `(X, y)`\nyhat = predict(mach, Xtest)\nmulti_loss(yhat, ytest)\",\"category\":\"page\"},{\"location\":\"models/MultitargetNeuralNetworkRegressor_MLJFlux/\",\"page\":\"MultitargetNeuralNetworkRegressor\",\"title\":\"MultitargetNeuralNetworkRegressor\",\"text\":\"See also NeuralNetworkRegressor\",\"category\":\"page\"},{\"location\":\"models/Standardizer_MLJModels/#Standardizer_MLJModels\",\"page\":\"Standardizer\",\"title\":\"Standardizer\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/Standardizer_MLJModels/\",\"page\":\"Standardizer\",\"title\":\"Standardizer\",\"text\":\"Standardizer\",\"category\":\"page\"},{\"location\":\"models/Standardizer_MLJModels/\",\"page\":\"Standardizer\",\"title\":\"Standardizer\",\"text\":\"A model type for constructing a standardizer, based on MLJModels.jl, and implementing the MLJ model interface.\",\"category\":\"page\"},{\"location\":\"models/Standardizer_MLJModels/\",\"page\":\"Standardizer\",\"title\":\"Standardizer\",\"text\":\"From MLJ, the type can be imported using\",\"category\":\"page\"},{\"location\":\"models/Standardizer_MLJModels/\",\"page\":\"Standardizer\",\"title\":\"Standardizer\",\"text\":\"Standardizer = @load Standardizer pkg=MLJModels\",\"category\":\"page\"},{\"location\":\"models/Standardizer_MLJModels/\",\"page\":\"Standardizer\",\"title\":\"Standardizer\",\"text\":\"Do model = Standardizer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in Standardizer(features=...).\",\"category\":\"page\"},{\"location\":\"models/Standardizer_MLJModels/\",\"page\":\"Standardizer\",\"title\":\"Standardizer\",\"text\":\"Use this model to standardize (whiten) a Continuous vector, or relevant columns of a table. 
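For a minimal usage sketch (the column names and values below are invented for illustration; a fuller example appears under Examples below):\n\nusing MLJ\nX = (x1 = [0.5, 1.5, 2.5], x2 = [10.0, 20.0, 30.0])\nmach = fit!(machine(Standardizer(), X))\nW = transform(mach, X)  ## each column of W now has mean zero and unit standard deviation\n\n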
The rescalings applied by this transformer to new data are always those learned during the training phase, which are generally different from what would actually standardize the new data.","category":"page"},{"location":"models/Standardizer_MLJModels/#Training-data","page":"Standardizer","title":"Training data","text":"","category":"section"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"mach = machine(model, X)","category":"page"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"where","category":"page"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"X: any Tables.jl compatible table or any abstract vector with Continuous element scitype (any abstract float vector). Only features in a table with Continuous scitype can be standardized; check column scitypes with schema(X).","category":"page"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/Standardizer_MLJModels/#Hyper-parameters","page":"Standardizer","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"features: one of the following, with the behavior indicated below:\n[] (empty, the default): standardize all features (columns) having Continuous element scitype\nnon-empty vector of feature names (symbols): standardize only the Continuous features in the vector (if ignore=false) or Continuous features not named in the vector (ignore=true).\nfunction or other callable: standardize a feature if the callable returns true on its name. 
For example, Standardizer(features = name -> name in [:x1, :x3], ignore = true, count=true) has the same effect as Standardizer(features = [:x1, :x3], ignore = true, count=true), namely to standardize all Continuous and Count features, with the exception of :x1 and :x3.\nNote this behavior is further modified if the ordered_factor or count flags are set to true; see below\nignore=false: whether to ignore or standardize specified features, as explained above\nordered_factor=false: if true, standardize any OrderedFactor feature wherever a Continuous feature would be standardized, as described above\ncount=false: if true, standardize any Count feature wherever a Continuous feature would be standardized, as described above","category":"page"},{"location":"models/Standardizer_MLJModels/#Operations","page":"Standardizer","title":"Operations","text":"","category":"section"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"transform(mach, Xnew): return Xnew with relevant features standardized according to the rescalings learned during fitting of mach.\ninverse_transform(mach, Z): apply the inverse transformation to Z, so that inverse_transform(mach, transform(mach, Xnew)) is approximately the same as Xnew; unavailable if ordered_factor or count flags were set to true.","category":"page"},{"location":"models/Standardizer_MLJModels/#Fitted-parameters","page":"Standardizer","title":"Fitted parameters","text":"","category":"section"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"features_fit - the names of features that will be standardized\nmeans - the corresponding untransformed mean values\nstds - the corresponding untransformed standard deviations","category":"page"},{"location":"models/Standardizer_MLJModels/#Report","page":"Standardizer","title":"Report","text":"","category":"section"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"The fields of report(mach) are:","category":"page"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"features_fit: the names of features that will be standardized","category":"page"},{"location":"models/Standardizer_MLJModels/#Examples","page":"Standardizer","title":"Examples","text":"","category":"section"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"using MLJ\n\nX = (ordinal1 = [1, 2, 3],\n ordinal2 = coerce([:x, :y, :x], OrderedFactor),\n ordinal3 = [10.0, 20.0, 30.0],\n ordinal4 = [-20.0, -30.0, -40.0],\n nominal = coerce([\"Your father\", \"he\", \"is\"], Multiclass));\n\njulia> schema(X)\n┌──────────┬──────────────────┐\n│ names │ scitypes │\n├──────────┼──────────────────┤\n│ ordinal1 │ Count │\n│ ordinal2 │ OrderedFactor{2} │\n│ ordinal3 │ Continuous │\n│ ordinal4 │ Continuous │\n│ nominal │ Multiclass{3} │\n└──────────┴──────────────────┘\n\nstand1 = Standardizer();\n\njulia> transform(fit!(machine(stand1, X)), X)\n(ordinal1 = [1, 2, 3],\n ordinal2 = CategoricalValue{Symbol,UInt32}[:x, :y, :x],\n ordinal3 = [-1.0, 0.0, 1.0],\n ordinal4 = [1.0, 0.0, -1.0],\n nominal = CategoricalValue{String,UInt32}[\"Your father\", \"he\", \"is\"],)\n\nstand2 = Standardizer(features=[:ordinal3, ], ignore=true, count=true);\n\njulia> transform(fit!(machine(stand2, X)), 
X)\n(ordinal1 = [-1.0, 0.0, 1.0],\n ordinal2 = CategoricalValue{Symbol,UInt32}[:x, :y, :x],\n ordinal3 = [10.0, 20.0, 30.0],\n ordinal4 = [1.0, 0.0, -1.0],\n nominal = CategoricalValue{String,UInt32}[\"Your father\", \"he\", \"is\"],)","category":"page"},{"location":"models/Standardizer_MLJModels/","page":"Standardizer","title":"Standardizer","text":"See also OneHotEncoder, ContinuousEncoder.","category":"page"},{"location":"models/BernoulliNBClassifier_MLJScikitLearnInterface/#BernoulliNBClassifier_MLJScikitLearnInterface","page":"BernoulliNBClassifier","title":"BernoulliNBClassifier","text":"","category":"section"},{"location":"models/BernoulliNBClassifier_MLJScikitLearnInterface/","page":"BernoulliNBClassifier","title":"BernoulliNBClassifier","text":"BernoulliNBClassifier","category":"page"},{"location":"models/BernoulliNBClassifier_MLJScikitLearnInterface/","page":"BernoulliNBClassifier","title":"BernoulliNBClassifier","text":"A model type for constructing a Bernoulli naive Bayes classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BernoulliNBClassifier_MLJScikitLearnInterface/","page":"BernoulliNBClassifier","title":"BernoulliNBClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BernoulliNBClassifier_MLJScikitLearnInterface/","page":"BernoulliNBClassifier","title":"BernoulliNBClassifier","text":"BernoulliNBClassifier = @load BernoulliNBClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/BernoulliNBClassifier_MLJScikitLearnInterface/","page":"BernoulliNBClassifier","title":"BernoulliNBClassifier","text":"Do model = BernoulliNBClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BernoulliNBClassifier(alpha=...).","category":"page"},{"location":"models/BernoulliNBClassifier_MLJScikitLearnInterface/","page":"BernoulliNBClassifier","title":"BernoulliNBClassifier","text":"Binomial naive bayes classifier. 
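A minimal usage sketch (invented data; assumes MLJScikitLearnInterface and its scikit-learn dependencies are installed):\n\nusing MLJ\nBernoulliNBClassifier = @load BernoulliNBClassifier pkg=MLJScikitLearnInterface\nX = (f1 = [1.0, 0.0, 1.0, 0.0], f2 = [0.0, 0.0, 1.0, 1.0])  ## already binary features\ny = coerce([0, 1, 0, 1], Multiclass)\nmach = machine(BernoulliNBClassifier(), X, y) |> fit!\npredict_mode(mach, X)  ## most probable class for each row\n\n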
It is suitable for classification with binary features; features will be binarized based on the binarize keyword (unless it's nothing in which case the features are assumed to be binary).","category":"page"},{"location":"models/MultitargetKNNRegressor_NearestNeighborModels/#MultitargetKNNRegressor_NearestNeighborModels","page":"MultitargetKNNRegressor","title":"MultitargetKNNRegressor","text":"","category":"section"},{"location":"models/MultitargetKNNRegressor_NearestNeighborModels/","page":"MultitargetKNNRegressor","title":"MultitargetKNNRegressor","text":"MultitargetKNNRegressor","category":"page"},{"location":"models/MultitargetKNNRegressor_NearestNeighborModels/","page":"MultitargetKNNRegressor","title":"MultitargetKNNRegressor","text":"A model type for constructing a multitarget K-nearest neighbor regressor, based on NearestNeighborModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultitargetKNNRegressor_NearestNeighborModels/","page":"MultitargetKNNRegressor","title":"MultitargetKNNRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultitargetKNNRegressor_NearestNeighborModels/","page":"MultitargetKNNRegressor","title":"MultitargetKNNRegressor","text":"MultitargetKNNRegressor = @load MultitargetKNNRegressor pkg=NearestNeighborModels","category":"page"},{"location":"models/MultitargetKNNRegressor_NearestNeighborModels/","page":"MultitargetKNNRegressor","title":"MultitargetKNNRegressor","text":"Do model = MultitargetKNNRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetKNNRegressor(K=...).","category":"page"},{"location":"models/MultitargetKNNRegressor_NearestNeighborModels/","page":"MultitargetKNNRegressor","title":"MultitargetKNNRegressor","text":"Multi-target K-Nearest Neighbors regressor (MultitargetKNNRegressor) is a variation of KNNRegressor that assumes the target variable is vector-valued with Continuous components. 
(Target data must be presented as a table, however.)\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/#Training-data\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"Training data\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"In MLJ or MLJBase, bind an instance model to data with\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"mach = machine(model, X, y)\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"OR\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"mach = machine(model, X, y, w)\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"Here:\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any table of responses whose element scitype is Continuous; check column scitypes with schema(y).\nw is the observation weights, which can either be nothing (default) or an AbstractVector whose element scitype is Count or Continuous. This is different from the weights kernel, which is a hyperparameter of the model; see below.\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"Train the machine using fit!(mach, rows=...).\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/#Hyper-parameters\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"Hyper-parameters\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"K::Int=5 : number of neighbors\nalgorithm::Symbol = :kdtree : one of (:kdtree, :brutetree, :balltree)\nmetric::Metric = Euclidean() : any Metric from Distances.jl for the distance between points. For algorithm = :kdtree only metrics which are instances of Union{Distances.Chebyshev, Distances.Cityblock, Distances.Euclidean, Distances.Minkowski, Distances.WeightedCityblock, Distances.WeightedEuclidean, Distances.WeightedMinkowski} are supported.\nleafsize::Int = 10 : determines the number of points at which to stop splitting the tree. This option is ignored and always taken as 0 for algorithm = :brutetree, since brutetree isn't actually a tree.\nreorder::Bool = true : if true then points which are close in distance are placed close in memory. In this case, a copy of the original data will be made so that the original data is left unmodified. Setting this to true can significantly improve performance of the specified algorithm (except :brutetree). This option is ignored and always taken as false for algorithm = :brutetree.\nweights::KNNKernel=Uniform() : kernel used in assigning weights to the k-nearest neighbors for each observation. 
An instance of one of the types in list_kernels(). User-defined weighting functions can be passed by wrapping the function in a UserDefinedKernel kernel (do ?NearestNeighborModels.UserDefinedKernel for more info). If observation weights w are passed during machine construction, then the weight assigned to each neighbor vote is the product of the kernel-generated weight for that neighbor and the corresponding observation weight.\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/#Operations\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"Operations\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"predict(mach, Xnew): Return predictions of the target given features Xnew, which should have the same scitype as X above.\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/#Fitted-parameters\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"Fitted parameters\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"The fields of fitted_params(mach) are:\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"tree: An instance of either KDTree, BruteTree or BallTree depending on the value of the algorithm hyperparameter (see the hyper-parameters section above). These are data structures that store the training data, with a view to making nearest neighbor searches on test data points quicker.\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/#Examples\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"Examples\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"using MLJ\n\n## Create Data\nX, y = make_regression(10, 5, n_targets=2)\n\n## load MultitargetKNNRegressor\nMultitargetKNNRegressor = @load MultitargetKNNRegressor pkg=NearestNeighborModels\n\n## view possible kernels\nNearestNeighborModels.list_kernels()\n\n## MultitargetKNNRegressor instantiation\nmodel = MultitargetKNNRegressor(weights = NearestNeighborModels.Inverse())\n\n## Wrap model and required data in an MLJ machine and fit.\nmach = machine(model, X, y) |> fit! \n\n## Predict\ny_hat = predict(mach, X)\n\",\"category\":\"page\"},{\"location\":\"models/MultitargetKNNRegressor_NearestNeighborModels/\",\"page\":\"MultitargetKNNRegressor\",\"title\":\"MultitargetKNNRegressor\",\"text\":\"See also KNNRegressor\",\"category\":\"page\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/#ABODDetector_OutlierDetectionNeighbors\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"ABODDetector(k = 5,\n metric = Euclidean(),\n algorithm = :kdtree,\n static = :auto,\n leafsize = 10,\n reorder = true,\n parallel = false,\n enhanced = false)\",\"category\":\"page\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"Determine outliers based on the angles to its nearest neighbors. 
This implements the FastABOD variant described in the paper, that is, it uses the variance of angles to its nearest neighbors, not to the whole dataset, see [1]. ","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"Notice: The scores are inverted, to conform to our notion that higher scores describe higher outlierness.","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/#Parameters","page":"ABODDetector","title":"Parameters","text":"","category":"section"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"k::Integer","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"Number of neighbors (must be greater than 0).","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"metric::Metric","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"This is one of the Metric types defined in the Distances.jl package. It is possible to define your own metrics by creating new types that are subtypes of Metric.","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"algorithm::Symbol","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"static::Union{Bool, Symbol}","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"leafsize::Int","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"Determines at what number of points to stop splitting the tree further. There is a trade-off between traversing the tree and having to evaluate the metric function for increasing number of points.","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"reorder::Bool","category":"page"},{"location":"models/ABODDetector_OutlierDetectionNeighbors/","page":"ABODDetector","title":"ABODDetector","text":"While building the tree this will put points close in distance close in memory since this helps with cache locality. 
In this case, a copy of the original data will be made so that the original data is left unmodified. This can have a significant impact on performance and is by default set to true.\",\"category\":\"page\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"parallel::Bool\",\"category\":\"page\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. Note: fit is not parallel.\",\"category\":\"page\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"enhanced::Bool\",\"category\":\"page\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"When enhanced=true, it uses the enhanced ABOD (EABOD) adaptation proposed by [2].\",\"category\":\"page\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/#Examples\",\"page\":\"ABODDetector\",\"title\":\"Examples\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"using OutlierDetection: ABODDetector, fit, transform\ndetector = ABODDetector()\nX = rand(10, 100)\nmodel, result = fit(detector, X; verbosity=0)\ntest_scores = transform(detector, model, X)\",\"category\":\"page\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/#References\",\"page\":\"ABODDetector\",\"title\":\"References\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"[1] Kriegel, Hans-Peter; Schubert, Matthias; Zimek, Arthur (2008): Angle-based outlier detection in high-dimensional data.\",\"category\":\"page\"},{\"location\":\"models/ABODDetector_OutlierDetectionNeighbors/\",\"page\":\"ABODDetector\",\"title\":\"ABODDetector\",\"text\":\"[2] Li, Xiaojie; Lv, Jian Cheng; Cheng, Dongdong (2015): Angle-Based Outlier Detection Algorithm with More Stable Relationships.\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/#TfidfTransformer_MLJText\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"TfidfTransformer\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"A model type for constructing a TF-IDF transformer, based on MLJText.jl, and implementing the MLJ model interface.\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"From MLJ, the type can be imported using\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"TfidfTransformer = @load TfidfTransformer pkg=MLJText\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"Do model = TfidfTransformer() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in TfidfTransformer(max_doc_freq=...).\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"The transformer converts a collection of documents, tokenized or pre-parsed as bags of words/ngrams, to a matrix of TF-IDF scores. Here \"TF\" means term-frequency while \"IDF\" means inverse document frequency (defined below). The TF-IDF score is the product of the two. This is a common term weighting scheme in information retrieval that has also found good use in document classification. The goal of using TF-IDF instead of the raw frequencies of occurrence of a token in a given document is to scale down the impact of tokens that occur very frequently in a given corpus and that are hence empirically less informative than features that occur in a small fraction of the training corpus.\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"In textbooks and implementations there is variation in the definition of IDF. Here two IDF definitions are available. The default, smoothed option provides the IDF for a term t as log((1 + n)/(1 + df(t))) + 1, where n is the total number of documents and df(t) the number of documents in which t appears. Setting smooth_idf = false provides an IDF of log(n/df(t)) + 1.\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/#Training-data\",\"page\":\"TfidfTransformer\",\"title\":\"Training data\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"In MLJ or MLJBase, bind an instance model to data with\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"mach = machine(model, X)\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"Here:\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"X is any vector whose elements are either tokenized documents or bags of words/ngrams. Specifically, each element is one of the following:\nA vector of abstract strings (tokens), e.g., [\"I\", \"like\", \"Sam\", \".\", \"Sam\", \"is\", \"nice\", \".\"] (scitype AbstractVector{Textual})\nA dictionary of counts, indexed on abstract strings, e.g., Dict(\"I\"=>1, \"Sam\"=>2, \"Sam is\"=>1) (scitype Multiset{Textual})\nA dictionary of counts, indexed on plain ngrams, e.g., Dict((\"I\",)=>1, (\"Sam\",)=>2, (\"I\", \"Sam\")=>1) (scitype Multiset{<:NTuple{N,Textual} where N}); here a plain ngram is a tuple of abstract strings.\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"Train the machine using fit!(mach, rows=...).\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/#Hyper-parameters\",\"page\":\"TfidfTransformer\",\"title\":\"Hyper-parameters\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"max_doc_freq=1.0: Restricts the vocabulary that the transformer will consider. Terms that occur in > max_doc_freq documents will not be considered by the transformer. 
For example, if max_doc_freq is set to 0.9, terms that are in more than 90% of the documents will be removed.\nmin_doc_freq=0.0: Restricts the vocabulary that the transformer will consider. Terms that occur in < min_doc_freq documents will not be considered by the transformer. A value of 0.01 means that only terms that are in at least 1% of the documents will be included.\nsmooth_idf=true: Controls which definition of IDF to use (see above).\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/#Operations\",\"page\":\"TfidfTransformer\",\"title\":\"Operations\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"transform(mach, Xnew): Based on the vocabulary and IDF learned in training, return the matrix of TF-IDF scores for Xnew, a vector of the same form as X above. The matrix has size (n, p), where n = length(Xnew) and p the size of the vocabulary. Tokens/ngrams not appearing in the learned vocabulary are scored zero.\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/#Fitted-parameters\",\"page\":\"TfidfTransformer\",\"title\":\"Fitted parameters\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"The fields of fitted_params(mach) are:\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"vocab: A vector containing the strings used in the transformer's vocabulary.\nidf_vector: The transformer's calculated IDF vector.\",\"category\":\"page\"},{\"location\":\"models/TfidfTransformer_MLJText/#Examples\",\"page\":\"TfidfTransformer\",\"title\":\"Examples\",\"text\":\"\",\"category\":\"section\"},{\"location\":\"models/TfidfTransformer_MLJText/\",\"page\":\"TfidfTransformer\",\"title\":\"TfidfTransformer\",\"text\":\"TfidfTransformer accepts a variety of inputs. 
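As a quick numerical illustration of the two IDF definitions given above (the document counts here are invented):\n\nn = 10                         ## total number of documents\ndf = 3                         ## number of documents containing the term\nlog((1 + n)/(1 + df)) + 1      ## smooth_idf = true (the default): ≈ 2.01\nlog(n/df) + 1                  ## smooth_idf = false: ≈ 2.20\n\n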
The example below transforms tokenized documents:","category":"page"},{"location":"models/TfidfTransformer_MLJText/","page":"TfidfTransformer","title":"TfidfTransformer","text":"using MLJ\nimport TextAnalysis\n\nTfidfTransformer = @load TfidfTransformer pkg=MLJText\n\ndocs = [\"Hi my name is Sam.\", \"How are you today?\"]\ntfidf_transformer = TfidfTransformer()\n\njulia> tokenized_docs = TextAnalysis.tokenize.(docs)\n2-element Vector{Vector{String}}:\n [\"Hi\", \"my\", \"name\", \"is\", \"Sam\", \".\"]\n [\"How\", \"are\", \"you\", \"today\", \"?\"]\n\nmach = machine(tfidf_transformer, tokenized_docs)\nfit!(mach)\n\nfitted_params(mach)\n\ntfidf_mat = transform(mach, tokenized_docs)","category":"page"},{"location":"models/TfidfTransformer_MLJText/","page":"TfidfTransformer","title":"TfidfTransformer","text":"Alternatively, one can provide documents pre-parsed as ngrams counts:","category":"page"},{"location":"models/TfidfTransformer_MLJText/","page":"TfidfTransformer","title":"TfidfTransformer","text":"using MLJ\nimport TextAnalysis\n\ndocs = [\"Hi my name is Sam.\", \"How are you today?\"]\ncorpus = TextAnalysis.Corpus(TextAnalysis.NGramDocument.(docs, 1, 2))\nngram_docs = TextAnalysis.ngrams.(corpus)\n\njulia> ngram_docs[1]\nDict{AbstractString, Int64} with 11 entries:\n \"is\" => 1\n \"my\" => 1\n \"name\" => 1\n \".\" => 1\n \"Hi\" => 1\n \"Sam\" => 1\n \"my name\" => 1\n \"Hi my\" => 1\n \"name is\" => 1\n \"Sam .\" => 1\n \"is Sam\" => 1\n\ntfidf_transformer = TfidfTransformer()\nmach = machine(tfidf_transformer, ngram_docs)\nMLJ.fit!(mach)\nfitted_params(mach)\n\ntfidf_mat = transform(mach, ngram_docs)","category":"page"},{"location":"models/TfidfTransformer_MLJText/","page":"TfidfTransformer","title":"TfidfTransformer","text":"See also CountTransformer, BM25Transformer","category":"page"},{"location":"machines/#Machines","page":"Machines","title":"Machines","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"Recall from Getting Started that a machine binds a model (i.e., a choice of algorithm + hyperparameters) to data (see more at Constructing machines below). A machine is also the object storing learned parameters. Under the hood, calling fit! on a machine calls either MLJBase.fit or MLJBase.update, depending on the machine's internal state (as recorded in private fields old_model and old_rows). These lower-level fit and update methods, which are not ordinarily called directly by the user, dispatch on the model and a view of the data defined by the optional rows keyword argument of fit! (all rows by default).","category":"page"},{"location":"machines/#Warm-restarts","page":"Machines","title":"Warm restarts","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"If a model update method has been implemented for the model, calls to fit! will avoid redundant calculations for certain kinds of model mutations. The main use-case is increasing an iteration parameter, such as the number of epochs in a neural network. 
To test if SomeIterativeModel supports this feature, check iteration_parameter(SomeIterativeModel) is different from nothing.","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"using MLJ; color_off() # hide\ntree = (@load DecisionTreeClassifier pkg=DecisionTree verbosity=0)()\nforest = EnsembleModel(model=tree, n=10);\nX, y = @load_iris;\nmach = machine(forest, X, y)\nfit!(mach, verbosity=2);","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"Generally, changing a hyperparameter triggers retraining on calls to subsequent fit!:","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"forest.bagging_fraction=0.5\nfit!(mach, verbosity=2);","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"However, for this iterative model, increasing the iteration parameter only adds models to the existing ensemble:","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"forest.n=15\nfit!(mach, verbosity=2);","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"Call fit! again without making a change and no retraining occurs:","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"fit!(mach);","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"However, retraining can be forced:","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"fit!(mach, force=true);","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"And is re-triggered if the view of the data changes:","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"fit!(mach, rows=1:100);","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"fit!(mach, rows=1:100);","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"If an iterative model exposes its iteration parameter as a hyperparameter, and it implements the warm restart behavior above, then it can be wrapped in a \"control strategy\", like an early stopping criterion. See Controlling Iterative Models for details.","category":"page"},{"location":"machines/#Inspecting-machines","page":"Machines","title":"Inspecting machines","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"There are two principal methods for inspecting the outcomes of training in MLJ. To obtain a named-tuple describing the learned parameters (in a user-friendly way where possible) use fitted_params(mach). 
All other training-related outcomes are inspected with report(mach).","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"X, y = @load_iris\npca = (@load PCA verbosity=0)()\nmach = machine(pca, X)\nfit!(mach)","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"fitted_params(mach)\nreport(mach)","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"fitted_params(::Machine)\nreport(::Machine)","category":"page"},{"location":"machines/#MLJModelInterface.fitted_params-Tuple{Machine}","page":"Machines","title":"MLJModelInterface.fitted_params","text":"fitted_params(mach)\n\nReturn the learned parameters for a machine mach that has been fit!, for example the coefficients in a linear model.\n\nThis is a named tuple and human-readable if possible.\n\nIf mach is a machine for a composite model, such as a model constructed using the pipeline syntax model1 |> model2 |> ..., then the returned named tuple has the composite type's field names as keys. The corresponding value is the fitted parameters for the machine in the underlying learning network bound to that model. (If multiple machines share the same model, then the value is a vector.)\n\nusing MLJ\n@load LogisticClassifier pkg=MLJLinearModels\nX, y = @load_crabs;\npipe = Standardizer() |> LogisticClassifier()\nmach = machine(pipe, X, y) |> fit!\n\njulia> fitted_params(mach).logistic_classifier\n(classes = CategoricalArrays.CategoricalValue{String,UInt32}[\"B\", \"O\"],\n coefs = Pair{Symbol,Float64}[:FL => 3.7095037897680405, :RW => 0.1135739140854546, :CL => -1.6036892745322038, :CW => -4.415667573486482, :BD => 3.238476051092471],\n intercept = 0.0883301599726305,)\n\nAdditional keys, machines and fitted_params_given_machine, give a list of all machines in the underlying network, and a dictionary of fitted parameters keyed on those machines.\n\nSee also report\n\n\n\n\n\n","category":"method"},{"location":"machines/#MLJBase.report-Tuple{Machine}","page":"Machines","title":"MLJBase.report","text":"report(mach)\n\nReturn the report for a machine mach that has been fit!, for example the coefficients in a linear model.\n\nThis is a named tuple and human-readable if possible.\n\nIf mach is a machine for a composite model, such as a model constructed using the pipeline syntax model1 |> model2 |> ..., then the returned named tuple has the composite type's field names as keys. The corresponding value is the report for the machine in the underlying learning network bound to that model. 
(If multiple machines share the same model, then the value is a vector.)\n\nusing MLJ\n@load LinearBinaryClassifier pkg=GLM\nX, y = @load_crabs;\npipe = Standardizer() |> LinearBinaryClassifier()\nmach = machine(pipe, X, y) |> fit!\n\njulia> report(mach).linear_binary_classifier\n(deviance = 3.8893386087844543e-7,\n dof_residual = 195.0,\n stderror = [18954.83496713119, 6502.845740757159, 48484.240246060406, 34971.131004997274, 20654.82322484894, 2111.1294584763386],\n vcov = [3.592857686311793e8 9.122732393971942e6 … -8.454645589364915e7 5.38856837634321e6; 9.122732393971942e6 4.228700272808351e7 … -4.978433790526467e7 -8.442545425533723e6; … ; -8.454645589364915e7 -4.978433790526467e7 … 4.2662172244975924e8 2.1799125705781363e7; 5.38856837634321e6 -8.442545425533723e6 … 2.1799125705781363e7 4.456867590446599e6],)\n\n\nAdditional keys, machines and report_given_machine, give a list of all machines in the underlying network, and a dictionary of reports keyed on those machines.\n\nSee also fitted_params\n\n\n\n\n\n","category":"method"},{"location":"machines/#Training-losses-and-feature-importances","page":"Machines","title":"Training losses and feature importances","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"Training losses and feature importances, if reported by a model, will be available in the machine's report (see above). However, there are also direct access methods where supported:","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"training_losses(mach::Machine) -> vector_of_losses","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"Here vector_of_losses will be in historical order (most recent loss last). This kind of access is supported for model = mach.model if supports_training_losses(model) == true.","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"feature_importances(mach::Machine) -> vector_of_pairs","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"Here a vector_of_pairs is a vector of elements of the form feature => importance_value, where feature is a symbol. For example, vector_of_pairs = [:gender => 0.23, :height => 0.7, :weight => 0.1]. If a model does not support feature importances for some model hyperparameters, every importance_value will be zero. This kind of access is supported for model = mach.model if reports_feature_importances(model) == true.","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"If a model can report multiple types of feature importances, then there will be a model hyper-parameter controlling the active type.","category":"page"},{"location":"machines/#Constructing-machines","page":"Machines","title":"Constructing machines","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"A machine is constructed with the syntax machine(model, args...) where the possibilities for args (called training arguments) are summarized in the table below. Here X and y represent inputs and target, respectively, and Xout is the output of a transform call. 
Machines for supervised models may have additional training arguments, such as a vector of per-observation weights (in which case supports_weights(model) == true).","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"model supertype machine constructor calls operation calls (first compulsory)\nDeterministic <: Supervised machine(model, X, y, extras...) predict(mach, Xnew), transform(mach, Xnew), inverse_transform(mach, Xout)\nProbabilistic <: Supervised machine(model, X, y, extras...) predict(mach, Xnew), predict_mean(mach, Xnew), predict_median(mach, Xnew), predict_mode(mach, Xnew), transform(mach, Xnew), inverse_transform(mach, Xout)\nUnsupervised (except Static) machine(model, X) transform(mach, Xnew), inverse_transform(mach, Xout), predict(mach, Xnew)\nStatic machine(model) transform(mach, Xnews...), inverse_transform(mach, Xout)","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"All operations on machines (predict, transform, etc) have exactly one argument (Xnew or Xout above) after mach, the machine instance. An exception is a machine bound to a Static model, which can have any number of arguments after mach. For more on Static transformers (which have no training arguments) see Static transformers.","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"A machine is reconstructed from a file using the syntax machine(\"my_machine.jlso\"), or machine(\"my_machine.jlso\", args...) if retraining using new data. See Saving machines below.","category":"page"},{"location":"machines/#Lowering-memory-demands","page":"Machines","title":"Lowering memory demands","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"For large data sets, you may be able to save memory by suppressing data caching that some models perform to increase speed. To do this, specify cache=false, as in","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"machine(model, X, y, cache=false)","category":"page"},{"location":"machines/#Constructing-machines-in-learning-networks","page":"Machines","title":"Constructing machines in learning networks","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"Instead of data X, y, etc, the machine constructor is provided Node or Source objects (\"dynamic data\") when building a learning network. See Learning Networks for more on this advanced feature.","category":"page"},{"location":"machines/#Saving-machines","page":"Machines","title":"Saving machines","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"Users can save and restore MLJ machines using any external serialization package by suitably preparing their Machine object, and applying a post-processing step to the deserialized object. This is explained under Using an arbitrary serializer below.","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"However, if a user is happy to use Julia's standard library Serialization module, there is a simplified workflow described first.","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"The usual serialization provisos apply. For example, when deserializing you need to have all code on which the serialization object depended loaded at the time of deserialization also. 
If a hyper-parameter happens to be a user-defined function, then that function must be defined at deserialization. And you should only deserialize objects from trusted sources.","category":"page"},{"location":"machines/#Using-Julia's-native-serializer","page":"Machines","title":"Using Julia's native serializer","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"MLJBase.save","category":"page"},{"location":"machines/#MLJModelInterface.save","page":"Machines","title":"MLJModelInterface.save","text":"MLJ.save(filename, mach::Machine)\nMLJ.save(io, mach::Machine)\n\nMLJBase.save(filename, mach::Machine)\nMLJBase.save(io, mach::Machine)\n\nSerialize the machine mach to a file with path filename, or to an input/output stream io (at least IOBuffer instances are supported) using the Serialization module.\n\nTo serialise using a different format, see serializable.\n\nMachines are deserialized using the machine constructor as shown in the example below.\n\nThe implementation of save for machines changed in MLJ 0.18 (MLJBase 0.20). You can only restore a machine saved using older versions of MLJ using an older version.\n\nExample\n\nusing MLJ\nTree = @load DecisionTreeClassifier\nX, y = @load_iris\nmach = fit!(machine(Tree(), X, y))\n\nMLJ.save(\"tree.jls\", mach)\nmach_predict_only = machine(\"tree.jls\")\npredict(mach_predict_only, X)\n\n# using a buffer:\nio = IOBuffer()\nMLJ.save(io, mach)\nseekstart(io)\npredict_only_mach = machine(io)\npredict(predict_only_mach, X)\n\nwarning: Only load files from trusted sources\nMaliciously constructed JLS files, like pickles, and most other general purpose serialization formats, can allow for arbitrary code execution during loading. This means it is possible for someone to use a JLS file that looks like a serialized MLJ machine as a Trojan horse.\n\nSee also serializable, machine.\n\n\n\n\n\n","category":"function"},{"location":"machines/#Using-an-arbitrary-serializer","page":"Machines","title":"Using an arbitrary serializer","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"Since machines contain training data, serializing a machine directly is not recommended. Also, the learned parameters of models implemented in a language other than Julia may not have persistent representations, which means serializing them is useless. To address these two issues, users:","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"Call serializable(mach) on a machine mach they wish to save (to remove data and create persistent learned parameters)\nSerialize the returned object using SomeSerializationPkg","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"To restore the original machine (minus training data) they:","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"Deserialize using SomeSerializationPkg to obtain a new object mach\nCall restore!(mach) to ensure mach can be used to predict or transform new data.","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"MLJBase.serializable\nMLJBase.restore!","category":"page"},{"location":"machines/#MLJBase.serializable","page":"Machines","title":"MLJBase.serializable","text":"serializable(mach::Machine)\n\nReturns a shallow copy of the machine to make it serializable. 
In particular, all training data is removed and, if necessary, learned parameters are replaced with persistent representations.\n\nAny general purpose Julia serializer may be applied to the output of serializable (eg, JLSO, BSON, JLD) but you must call restore!(mach) on the deserialised object mach before using it. See the example below.\n\nIf using Julia's standard Serialization library, a shorter workflow is available using the MLJBase.save (or MLJ.save) method.\n\nA machine returned by serializable is characterized by the property mach.state == -1.\n\nExample using JLSO\n\nusing MLJ\nusing JLSO\nTree = @load DecisionTreeClassifier\ntree = Tree()\nX, y = @load_iris\nmach = fit!(machine(tree, X, y))\n\n# This machine can now be serialized\nsmach = serializable(mach)\nJLSO.save(\"machine.jlso\", :machine => smach)\n\n# Deserialize and restore learned parameters to useable form:\nloaded_mach = JLSO.load(\"machine.jlso\")[:machine]\nrestore!(loaded_mach)\n\npredict(loaded_mach, X)\npredict(mach, X)\n\nSee also restore!, MLJBase.save.\n\n\n\n\n\n","category":"function"},{"location":"machines/#MLJBase.restore!","page":"Machines","title":"MLJBase.restore!","text":"restore!(mach::Machine)\n\nRestore the state of a machine that is currently serializable but which may not be otherwise usable. For such a machine, mach, one has mach.state=1. Intended for restoring deserialized machine objects to a useable form.\n\nFor an example see serializable.\n\n\n\n\n\n","category":"function"},{"location":"machines/#Internals","page":"Machines","title":"Internals","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"For a supervised machine, the predict method calls a lower-level MLJBase.predict method, dispatched on the underlying model and the fitresult (see below). To see predict in action, as well as its unsupervised cousins transform and inverse_transform, see Getting Started.","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"Except for model, a Machine instance has several fields which the user should not directly access; these include:","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"model - the struct containing the hyperparameters to be used in calls to fit!\nfitresult - the learned parameters in a raw form, initially undefined\nargs - a tuple of the data, each element wrapped in a source node; see Learning Networks (in the supervised learning example above, args = (source(X), source(y)))\nreport - outputs of training not encoded in fitresult (eg, feature rankings), initially undefined\nold_model - a deep copy of the model used in the last call to fit!\nold_rows - a copy of the row indices used in the last call to fit!\ncache","category":"page"},{"location":"machines/","page":"Machines","title":"Machines","text":"The interested reader can learn more about machine internals by examining the simplified code excerpt in Internals.","category":"page"},{"location":"machines/#API-Reference","page":"Machines","title":"API Reference","text":"","category":"section"},{"location":"machines/","page":"Machines","title":"Machines","text":"MLJBase.machine\nfit!\nfit_only!","category":"page"},{"location":"machines/#MLJBase.machine","page":"Machines","title":"MLJBase.machine","text":"machine(model, args...; cache=true, scitype_check_level=1)\n\nConstruct a Machine object binding a model, storing hyper-parameters of some machine learning algorithm, to some data, args. Calling fit! 
on a Machine instance mach stores outcomes of applying the algorithm in mach, which can be inspected using fitted_params(mach) (learned paramters) and report(mach) (other outcomes). This in turn enables generalization to new data using operations such as predict or transform:\n\nusing MLJModels\nX, y = make_regression()\n\nPCA = @load PCA pkg=MultivariateStats\nmodel = PCA()\nmach = machine(model, X)\nfit!(mach, rows=1:50)\ntransform(mach, selectrows(X, 51:100)) # or transform(mach, rows=51:100)\n\nDecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree\nmodel = DecisionTreeRegressor()\nmach = machine(model, X, y)\nfit!(mach, rows=1:50)\npredict(mach, selectrows(X, 51:100)) # or predict(mach, rows=51:100)\n\nSpecify cache=false to prioritize memory management over speed.\n\nWhen building a learning network, Node objects can be substituted for the concrete data but no type or dimension checks are applied.\n\nChecks on the types of training data\n\nA model articulates its data requirements using scientific types, i.e., using the scitype function instead of the typeof function.\n\nIf scitype_check_level > 0 then the scitype of each arg in args is computed, and this is compared with the scitypes expected by the model, unless args contains Unknown scitypes and scitype_check_level < 4, in which case no further action is taken. Whether warnings are issued or errors thrown depends the level. For details, see default_scitype_check_level, a method to inspect or change the default level (1 at startup).\n\nMachines with model placeholders\n\nA symbol can be substituted for a model in machine constructors to act as a placeholder for a model specified at training time. The symbol must be the field name for a struct whose corresponding value is a model, as shown in the following example:\n\nmutable struct MyComposite\n transformer\n classifier\nend\n\nmy_composite = MyComposite(Standardizer(), ConstantClassifier)\n\nX, y = make_blobs()\nmach = machine(:classifier, X, y)\nfit!(mach, composite=my_composite)\n\nThe last two lines are equivalent to\n\nmach = machine(ConstantClassifier(), X, y)\nfit!(mach)\n\nDelaying model specification is used when exporting learning networks as new stand-alone model types. See prefit and the MLJ documentation on learning networks.\n\nSee also fit!, default_scitype_check_level, MLJBase.save, serializable.\n\n\n\n\n\n","category":"function"},{"location":"machines/#StatsAPI.fit!","page":"Machines","title":"StatsAPI.fit!","text":"fit!(mach::Machine, rows=nothing, verbosity=1, force=false, composite=nothing)\n\nFit the machine mach. In the case that mach has Node arguments, first train all other machines on which mach depends.\n\nTo attempt to fit a machine without touching any other machine, use fit_only!. For more on options and the the internal logic of fitting see fit_only!\n\n\n\n\n\nfit!(N::Node;\n rows=nothing,\n verbosity=1,\n force=false,\n acceleration=CPU1())\n\nTrain all machines required to call the node N, in an appropriate order, but parallelizing where possible using specified acceleration mode. 
These machines are those returned by machines(N).\n\nSupported modes of acceleration: CPU1(), CPUThreads().\n\n\n\n\n\n","category":"function"},{"location":"machines/#MLJBase.fit_only!","page":"Machines","title":"MLJBase.fit_only!","text":"MLJBase.fit_only!(\n mach::Machine;\n rows=nothing,\n verbosity=1,\n force=false,\n composite=nothing,\n)\n\nWithout mutating any other machine on which it may depend, perform one of the following actions to the machine mach, using the data and model bound to it, and restricting the data to rows if specified:\n\nAb initio training. Ignoring any previous learned parameters and cache, compute and store new learned parameters. Increment mach.state.\nTraining update. Making use of previous learned parameters and/or cache, replace or mutate existing learned parameters. The effect is the same (or nearly the same) as in ab initio training, but may be faster or use less memory, assuming the model supports an update option (implements MLJBase.update). Increment mach.state.\nNo-operation. Leave existing learned parameters untouched. Do not increment mach.state.\n\nIf the model, model, bound to mach is a symbol, then instead perform the action using the true model given by getproperty(composite, model). See also machine.\n\nTraining action logic\n\nFor the action to be a no-operation, either mach.frozen == true or none of the following apply:\n\n(i) mach has never been trained (mach.state == 0).\n(ii) force == true.\n(iii) The state of some other machine on which mach depends has changed since the last time mach was trained (ie, the last time mach.state was incremented).\n(iv) The specified rows have changed since the last retraining and mach.model does not have Static type.\n(v) mach.model is a model and different from the last model used for training, but has the same type.\n(vi) mach.model is a model but has a type different from the last model used for training.\n(vii) mach.model is a symbol and (composite, mach.model) is different from the last model used for training, but has the same type.\n(viii) mach.model is a symbol and (composite, mach.model) has a different type from the last model used for training.\n\nIn any of the cases (i) - (iv), (vi), or (viii), mach is trained ab initio. If (v) or (vii) is true, then a training update is applied.\n\nTo freeze or unfreeze mach, use freeze!(mach) or thaw!(mach).\n\nImplementation details\n\nThe data to which a machine is bound is stored in mach.args. Each element of args is either a Node object, or, in the case that concrete data was bound to the machine, it is concrete data wrapped in a Source node. In all cases, to obtain concrete data for actual training, each argument N is called, as in N() or N(rows=rows), and either MLJBase.fit (ab initio training) or MLJBase.update (training update) is dispatched on mach.model and this data. 
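As a concrete illustration of the internals just described, the following minimal sketch (added here for illustration only, and not part of the docstring above) inspects the bound arguments of a machine and the concrete data they wrap:

using MLJ

X, y = @load_iris
mach = machine(ConstantClassifier(), X, y)

mach.args          # a tuple of source nodes wrapping X and y
Xs = mach.args[1]
Xs()               # calling a source node returns the wrapped concrete data (here, X)
fit!(mach)         # internally dispatches MLJBase.fit on the model and this data
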
See the \"Adding models for general use\" section of the MLJ documentation for more on these lower-level training methods.\n\n\n\n\n\n","category":"function"},{"location":"models/AutoEncoder_BetaML/#AutoEncoder_BetaML","page":"AutoEncoder","title":"AutoEncoder","text":"","category":"section"},{"location":"models/AutoEncoder_BetaML/","page":"AutoEncoder","title":"AutoEncoder","text":"mutable struct AutoEncoder <: MLJModelInterface.Unsupervised","category":"page"},{"location":"models/AutoEncoder_BetaML/","page":"AutoEncoder","title":"AutoEncoder","text":"A ready-to-use AutoEncoder, from the Beta Machine Learning Toolkit (BetaML), for encoding and decoding of data using neural networks.","category":"page"},{"location":"models/AutoEncoder_BetaML/#Parameters:","page":"AutoEncoder","title":"Parameters:","text":"","category":"section"},{"location":"models/AutoEncoder_BetaML/","page":"AutoEncoder","title":"AutoEncoder","text":"e_layers: The layers (vector of AbstractLayers) responsible for the encoding of the data [def: nothing, i.e. two dense layers with the inner one of innerdims]. See subtypes(BetaML.AbstractLayer) for supported layers\nd_layers: The layers (vector of AbstractLayers) responsible for the decoding of the data [def: nothing, i.e. two dense layers with the inner one of innerdims]. See subtypes(BetaML.AbstractLayer) for supported layers\noutdims: The number of neurons (i.e. dimensions) of the encoded data. If the value is a float, it is considered a proportion (to be rounded) of the dimensionality of the data [def: 0.33]\ninnerdims: Inner layer dimension (i.e. number of neurons). If the value is a float, it is considered a proportion (to be rounded) of the dimensionality of the data [def: nothing, which applies a specific heuristic]. Consider that the underlying neural network is trying to predict multiple values at the same time. Normally this requires many more neurons than for a scalar prediction. If e_layers or d_layers are specified, this parameter is ignored for the respective part.\nloss: Loss (cost) function [def: BetaML.squared_cost]. Should always assume y and ŷ are (n x d) matrices.\nwarning: Warning\nIf you change the parameter loss, you need to either provide its derivative on the parameter dloss or use autodiff with dloss=nothing.\ndloss: Derivative of the loss function [def: BetaML.dsquared_cost if loss==squared_cost, nothing otherwise, i.e. use the derivative of the squared cost or autodiff]\nepochs: Number of epochs, i.e. passes through the whole training sample [def: 200]\nbatch_size: Size of each individual batch [def: 8]\nopt_alg: The optimisation algorithm to update the gradient at each batch [def: BetaML.ADAM()]. See subtypes(BetaML.OptimisationAlgorithm) for supported optimizers\nshuffle: Whether to randomly shuffle the data at each iteration (epoch) [def: true]\ntunemethod: The method - and its parameters - to employ for hyperparameter autotuning. See SuccessiveHalvingSearch for the default method. To implement automatic hyperparameter tuning during the (first) fit! 
call simply set autotune=true and eventually change the default tunemethod options (including the parameter ranges, the resources to employ and the loss function to adopt).\ndescr: An optional title and/or description for this model\nrng: Random Number Generator (see FIXEDSEED) [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/AutoEncoder_BetaML/#Notes:","page":"AutoEncoder","title":"Notes:","text":"","category":"section"},{"location":"models/AutoEncoder_BetaML/","page":"AutoEncoder","title":"AutoEncoder","text":"data must be numerical\nuse transform to obtain the encoded data, and inverse_trasnform to decode to the original data","category":"page"},{"location":"models/AutoEncoder_BetaML/#Example:","page":"AutoEncoder","title":"Example:","text":"","category":"section"},{"location":"models/AutoEncoder_BetaML/","page":"AutoEncoder","title":"AutoEncoder","text":"julia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load AutoEncoder pkg = \"BetaML\" verbosity=0;\n\njulia> model = modelType(outdims=2,innerdims=10);\n\njulia> mach = machine(model, X)\nuntrained Machine; caches model-specific representations of data\n model: AutoEncoder(e_layers = nothing, …)\n args: \n 1:\tSource @334 ⏎ Table{AbstractVector{Continuous}}\n\njulia> fit!(mach,verbosity=2)\n[ Info: Training machine(AutoEncoder(e_layers = nothing, …), …).\n***\n*** Training for 200 epochs with algorithm BetaML.Nn.ADAM.\nTraining.. \t avg loss on epoch 1 (1): \t 35.48243542158747\nTraining.. \t avg loss on epoch 20 (20): \t 0.07528042222678126\nTraining.. \t avg loss on epoch 40 (40): \t 0.06293071729378613\nTraining.. \t avg loss on epoch 60 (60): \t 0.057035588828991145\nTraining.. \t avg loss on epoch 80 (80): \t 0.056313167754822875\nTraining.. \t avg loss on epoch 100 (100): \t 0.055521461091809436\nTraining the Neural Network... 52%|██████████████████████████████████████ | ETA: 0:00:01Training.. \t avg loss on epoch 120 (120): \t 0.06015206472927942\nTraining.. \t avg loss on epoch 140 (140): \t 0.05536835903285201\nTraining.. \t avg loss on epoch 160 (160): \t 0.05877560142428245\nTraining.. \t avg loss on epoch 180 (180): \t 0.05476302769966953\nTraining.. \t avg loss on epoch 200 (200): \t 0.049240864053557445\nTraining the Neural Network... 100%|█████████████████████████████████████████████████████████████████████████| Time: 0:00:01\nTraining of 200 epoch completed. 
Final epoch error: 0.049240864053557445.\ntrained Machine; caches model-specific representations of data\n model: AutoEncoder(e_layers = nothing, …)\n args: \n 1:\tSource @334 ⏎ Table{AbstractVector{Continuous}}\n\n\njulia> X_latent = transform(mach, X)\n150×2 Matrix{Float64}:\n 7.01701 -2.77285\n 6.50615 -2.9279\n 6.5233 -2.60754\n ⋮ \n 6.70196 -10.6059\n 6.46369 -11.1117\n 6.20212 -10.1323\n\njulia> X_recovered = inverse_transform(mach,X_latent)\n150×4 Matrix{Float64}:\n 5.04973 3.55838 1.43251 0.242215\n 4.73689 3.19985 1.44085 0.295257\n 4.65128 3.25308 1.30187 0.244354\n ⋮ \n 6.50077 2.93602 5.3303 1.87647\n 6.38639 2.83864 5.54395 2.04117\n 6.01595 2.67659 5.03669 1.83234\n\njulia> BetaML.relative_mean_error(MLJ.matrix(X),X_recovered)\n0.03387721261716176\n\n","category":"page"},{"location":"models/SVMLinearRegressor_MLJScikitLearnInterface/#SVMLinearRegressor_MLJScikitLearnInterface","page":"SVMLinearRegressor","title":"SVMLinearRegressor","text":"","category":"section"},{"location":"models/SVMLinearRegressor_MLJScikitLearnInterface/","page":"SVMLinearRegressor","title":"SVMLinearRegressor","text":"SVMLinearRegressor","category":"page"},{"location":"models/SVMLinearRegressor_MLJScikitLearnInterface/","page":"SVMLinearRegressor","title":"SVMLinearRegressor","text":"A model type for constructing a linear support vector regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SVMLinearRegressor_MLJScikitLearnInterface/","page":"SVMLinearRegressor","title":"SVMLinearRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SVMLinearRegressor_MLJScikitLearnInterface/","page":"SVMLinearRegressor","title":"SVMLinearRegressor","text":"SVMLinearRegressor = @load SVMLinearRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/SVMLinearRegressor_MLJScikitLearnInterface/","page":"SVMLinearRegressor","title":"SVMLinearRegressor","text":"Do model = SVMLinearRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in SVMLinearRegressor(epsilon=...).","category":"page"},{"location":"models/SVMLinearRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"SVMLinearRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SVMLinearRegressor_MLJScikitLearnInterface/","page":"SVMLinearRegressor","title":"SVMLinearRegressor","text":"epsilon = 0.0\ntol = 0.0001\nC = 1.0\nloss = epsilon_insensitive\nfit_intercept = true\nintercept_scaling = 1.0\ndual = true\nrandom_state = nothing\nmax_iter = 1000","category":"page"},{"location":"models/DecisionTreeRegressor_BetaML/#DecisionTreeRegressor_BetaML","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_BetaML/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"mutable struct DecisionTreeRegressor <: MLJModelInterface.Deterministic","category":"page"},{"location":"models/DecisionTreeRegressor_BetaML/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"A simple Decision Tree model for regression with support for Missing data, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/DecisionTreeRegressor_BetaML/#Hyperparameters:","page":"DecisionTreeRegressor","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_BetaML/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"max_depth::Int64: The maximum depth the tree is allowed to reach. When this is reached, the node is forced to become a leaf [def: 0, i.e. no limits]\nmin_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]\nmin_records::Int64: The minimum number of records a node must hold for a partition of it to be considered [def: 2]\nmax_features::Int64: The maximum number of (random) features to consider at each partitioning [def: 0, i.e. look at all features]\nsplitting_criterion::Function: This is the name of the function to be used to compute the information gain of a specific partition. This is done by measuring the difference between the \"impurity\" of the labels of the parent node and those of the two child nodes, weighted by the respective number of items. [def: variance]. Either variance or a custom function. 
It can also be an anonymous function.\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/DecisionTreeRegressor_BetaML/#Example:","page":"DecisionTreeRegressor","title":"Example:","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_BetaML/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"julia> using MLJ\n\njulia> X, y = @load_boston;\n\njulia> modelType = @load DecisionTreeRegressor pkg = \"BetaML\" verbosity=0\nBetaML.Trees.DecisionTreeRegressor\n\njulia> model = modelType()\nDecisionTreeRegressor(\n max_depth = 0, \n min_gain = 0.0, \n min_records = 2, \n max_features = 0, \n splitting_criterion = BetaML.Utils.variance, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n[ Info: Training machine(DecisionTreeRegressor(max_depth = 0, …), …).\n\njulia> ŷ = predict(mach, X);\n\njulia> hcat(y,ŷ)\n506×2 Matrix{Float64}:\n 24.0 26.35\n 21.6 21.6\n 34.7 34.8\n ⋮ \n 23.9 23.75\n 22.0 22.2\n 11.9 13.2","category":"page"},{"location":"models/LinearSVC_LIBSVM/#LinearSVC_LIBSVM","page":"LinearSVC","title":"LinearSVC","text":"","category":"section"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"LinearSVC","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"A model type for constructing a linear support vector classifier, based on LIBSVM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"LinearSVC = @load LinearSVC pkg=LIBSVM","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"Do model = LinearSVC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearSVC(solver=...).","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"Reference for algorithm and core C-library: Rong-En Fan et al (2008): \"LIBLINEAR: A Library for Large Linear Classification.\" Journal of Machine Learning Research 9 1871-1874. Available at https://www.csie.ntu.edu.tw/~cjlin/papers/liblinear.pdf. 
","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"This model type is similar to SVC from the same package with the setting kernel=LIBSVM.Kernel.KERNEL.Linear, but is optimized for the linear case.","category":"page"},{"location":"models/LinearSVC_LIBSVM/#Training-data","page":"LinearSVC","title":"Training data","text":"","category":"section"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"In MLJ or MLJBase, bind an instance model to data with one of:","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"mach = machine(model, X, y)\nmach = machine(model, X, y, w)","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"where","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)\nw: a dictionary of class weights, keyed on levels(y).","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LinearSVC_LIBSVM/#Hyper-parameters","page":"LinearSVC","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"solver=LIBSVM.Linearsolver.L2R_L2LOSS_SVC_DUAL: linear solver, which must be one of the following from the LIBSVM.jl package:\nLIBSVM.Linearsolver.L2R_LR: L2-regularized logistic regression (primal))\nLIBSVM.Linearsolver.L2R_L2LOSS_SVC_DUAL: L2-regularized L2-loss support vector classification (dual)\nLIBSVM.Linearsolver.L2R_L2LOSS_SVC: L2-regularized L2-loss support vector classification (primal)\nLIBSVM.Linearsolver.L2R_L1LOSS_SVC_DUAL: L2-regularized L1-loss support vector classification (dual)\nLIBSVM.Linearsolver.MCSVM_CS: support vector classification by Crammer and Singer) LIBSVM.Linearsolver.L1R_L2LOSS_SVC: L1-regularized L2-loss support vector classification)\nLIBSVM.Linearsolver.L1R_LR: L1-regularized logistic regression\nLIBSVM.Linearsolver.L2R_LR_DUAL: L2-regularized logistic regression (dual)\ntolerance::Float64=Inf: tolerance for the stopping criterion;\ncost=1.0 (range (0, Inf)): the parameter denoted C in the cited reference; for greater regularization, decrease cost\nbias= -1.0: if bias >= 0, instance x becomes [x; bias]; if bias < 0, no bias term added (default -1)","category":"page"},{"location":"models/LinearSVC_LIBSVM/#Operations","page":"LinearSVC","title":"Operations","text":"","category":"section"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/LinearSVC_LIBSVM/#Fitted-parameters","page":"LinearSVC","title":"Fitted parameters","text":"","category":"section"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"libsvm_model: the trained model object created by the LIBSVM.jl 
package\nencoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation","category":"page"},{"location":"models/LinearSVC_LIBSVM/#Examples","page":"LinearSVC","title":"Examples","text":"","category":"section"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"using MLJ\nimport LIBSVM\n\nLinearSVC = @load LinearSVC pkg=LIBSVM ## model type\nmodel = LinearSVC(solver=LIBSVM.Linearsolver.L2R_LR) ## instance\n\nX, y = @load_iris ## table, vector\nmach = machine(model, X, y) |> fit!\n\nXnew = (sepal_length = [6.4, 7.2, 7.4],\n sepal_width = [2.8, 3.0, 2.8],\n petal_length = [5.6, 5.8, 6.1],\n petal_width = [2.1, 1.6, 1.9],)\n\njulia> yhat = predict(mach, Xnew)\n3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:\n \"virginica\"\n \"versicolor\"\n \"virginica\"","category":"page"},{"location":"models/LinearSVC_LIBSVM/#Incorporating-class-weights","page":"LinearSVC","title":"Incorporating class weights","text":"","category":"section"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"weights = Dict(\"virginica\" => 1, \"versicolor\" => 20, \"setosa\" => 1)\nmach = machine(model, X, y, weights) |> fit!\n\njulia> yhat = predict(mach, Xnew)\n3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:\n \"versicolor\"\n \"versicolor\"\n \"versicolor\"","category":"page"},{"location":"models/LinearSVC_LIBSVM/","page":"LinearSVC","title":"LinearSVC","text":"See also the SVC and NuSVC classifiers, and LIVSVM.jl and the original C implementation documentation.","category":"page"},{"location":"model_browser/#Model-Browser","page":"Model Browser","title":"Model Browser","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"Models may appear under multiple categories.","category":"page"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"Below an encoder is any transformer that does not fall under another category, such as \"Missing Value Imputation\" or \"Dimension Reduction\".","category":"page"},{"location":"model_browser/#Categories","page":"Model Browser","title":"Categories","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"Regression | Classification | Outlier Detection | Iterative Models | Ensemble Models | Clustering | Dimension Reduction | Bayesian Models | Class Imbalance | Encoders | Static Models | Missing Value Imputation | Distribution Fitter | Text Analysis | Image Processing","category":"page"},{"location":"model_browser/#Regression","page":"Model Browser","title":"Regression","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"ARDRegressor (MLJScikitLearnInterface.jl)\nAdaBoostRegressor (MLJScikitLearnInterface.jl)\nBaggingRegressor (MLJScikitLearnInterface.jl)\nBayesianRidgeRegressor (MLJScikitLearnInterface.jl)\nCatBoostRegressor (CatBoost.jl)\nConstantRegressor (MLJModels.jl)\nDecisionTreeRegressor (BetaML.jl)\nDecisionTreeRegressor (DecisionTree.jl/MLJDecisionTreeInterface.jl)\nDeterministicConstantRegressor (MLJModels.jl)\nDummyRegressor (MLJScikitLearnInterface.jl)\nElasticNetCVRegressor (MLJScikitLearnInterface.jl)\nElasticNetRegressor (MLJLinearModels.jl)\nElasticNetRegressor (MLJScikitLearnInterface.jl)\nEpsilonSVR (LIBSVM.jl/MLJLIBSVMInterface.jl)\nEvoLinearRegressor (EvoLinear.jl)\nEvoSplineRegressor 
(EvoLinear.jl)\nEvoTreeCount (EvoTrees.jl)\nEvoTreeGaussian (EvoTrees.jl)\nEvoTreeMLE (EvoTrees.jl)\nEvoTreeRegressor (EvoTrees.jl)\nExtraTreesRegressor (MLJScikitLearnInterface.jl)\nGaussianMixtureRegressor (BetaML.jl)\nGaussianProcessRegressor (MLJScikitLearnInterface.jl)\nGradientBoostingRegressor (MLJScikitLearnInterface.jl)\nHistGradientBoostingRegressor (MLJScikitLearnInterface.jl)\nHuberRegressor (MLJLinearModels.jl)\nHuberRegressor (MLJScikitLearnInterface.jl)\nKNNRegressor (NearestNeighborModels.jl)\nKNeighborsRegressor (MLJScikitLearnInterface.jl)\nKPLSRegressor (PartialLeastSquaresRegressor.jl)\nLADRegressor (MLJLinearModels.jl)\nLGBMRegressor (LightGBM.jl)\nLarsCVRegressor (MLJScikitLearnInterface.jl)\nLarsRegressor (MLJScikitLearnInterface.jl)\nLassoCVRegressor (MLJScikitLearnInterface.jl)\nLassoLarsCVRegressor (MLJScikitLearnInterface.jl)\nLassoLarsICRegressor (MLJScikitLearnInterface.jl)\nLassoLarsRegressor (MLJScikitLearnInterface.jl)\nLassoRegressor (MLJLinearModels.jl)\nLassoRegressor (MLJScikitLearnInterface.jl)\nLinearCountRegressor (GLM.jl/MLJGLMInterface.jl)\nLinearRegressor (GLM.jl/MLJGLMInterface.jl)\nLinearRegressor (MLJLinearModels.jl)\nLinearRegressor (MLJScikitLearnInterface.jl)\nLinearRegressor (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nMultiTaskElasticNetCVRegressor (MLJScikitLearnInterface.jl)\nMultiTaskElasticNetRegressor (MLJScikitLearnInterface.jl)\nMultiTaskLassoCVRegressor (MLJScikitLearnInterface.jl)\nMultiTaskLassoRegressor (MLJScikitLearnInterface.jl)\nMultitargetGaussianMixtureRegressor (BetaML.jl)\nMultitargetKNNRegressor (NearestNeighborModels.jl)\nMultitargetLinearRegressor (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nMultitargetNeuralNetworkRegressor (BetaML.jl)\nMultitargetNeuralNetworkRegressor (MLJFlux.jl)\nMultitargetRidgeRegressor (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nMultitargetSRRegressor (SymbolicRegression.jl)\nNeuralNetworkRegressor (BetaML.jl)\nNeuralNetworkRegressor (MLJFlux.jl)\nNuSVR (LIBSVM.jl/MLJLIBSVMInterface.jl)\nOrthogonalMatchingPursuitCVRegressor (MLJScikitLearnInterface.jl)\nOrthogonalMatchingPursuitRegressor (MLJScikitLearnInterface.jl)\nPLSRegressor (PartialLeastSquaresRegressor.jl)\nPassiveAggressiveRegressor (MLJScikitLearnInterface.jl)\nQuantileRegressor (MLJLinearModels.jl)\nRANSACRegressor (MLJScikitLearnInterface.jl)\nRandomForestRegressor (BetaML.jl)\nRandomForestRegressor (DecisionTree.jl/MLJDecisionTreeInterface.jl)\nRandomForestRegressor (MLJScikitLearnInterface.jl)\nRidgeRegressor (MLJLinearModels.jl)\nRidgeRegressor (MLJScikitLearnInterface.jl)\nRidgeRegressor (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nRobustRegressor (MLJLinearModels.jl)\nSGDRegressor (MLJScikitLearnInterface.jl)\nSRRegressor (SymbolicRegression.jl)\nSVMLinearRegressor (MLJScikitLearnInterface.jl)\nSVMNuRegressor (MLJScikitLearnInterface.jl)\nSVMRegressor (MLJScikitLearnInterface.jl)\nStableForestRegressor (SIRUS.jl)\nStableRulesRegressor (SIRUS.jl)\nTheilSenRegressor (MLJScikitLearnInterface.jl)\nXGBoostCount (XGBoost.jl/MLJXGBoostInterface.jl)\nXGBoostRegressor (XGBoost.jl/MLJXGBoostInterface.jl)","category":"page"},{"location":"model_browser/#Classification","page":"Model Browser","title":"Classification","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"AdaBoostClassifier (MLJScikitLearnInterface.jl)\nAdaBoostStumpClassifier (DecisionTree.jl/MLJDecisionTreeInterface.jl)\nBaggingClassifier 
(MLJScikitLearnInterface.jl)\nBayesianLDA (MLJScikitLearnInterface.jl)\nBayesianLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nBayesianQDA (MLJScikitLearnInterface.jl)\nBayesianSubspaceLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nBernoulliNBClassifier (MLJScikitLearnInterface.jl)\nCatBoostClassifier (CatBoost.jl)\nComplementNBClassifier (MLJScikitLearnInterface.jl)\nConstantClassifier (MLJModels.jl)\nDecisionTreeClassifier (BetaML.jl)\nDecisionTreeClassifier (DecisionTree.jl/MLJDecisionTreeInterface.jl)\nDeterministicConstantClassifier (MLJModels.jl)\nDummyClassifier (MLJScikitLearnInterface.jl)\nEvoTreeClassifier (EvoTrees.jl)\nExtraTreesClassifier (MLJScikitLearnInterface.jl)\nGaussianNBClassifier (MLJScikitLearnInterface.jl)\nGaussianNBClassifier (NaiveBayes.jl/MLJNaiveBayesInterface.jl)\nGaussianProcessClassifier (MLJScikitLearnInterface.jl)\nGradientBoostingClassifier (MLJScikitLearnInterface.jl)\nHistGradientBoostingClassifier (MLJScikitLearnInterface.jl)\nImageClassifier (MLJFlux.jl)\nKNNClassifier (NearestNeighborModels.jl)\nKNeighborsClassifier (MLJScikitLearnInterface.jl)\nKernelPerceptronClassifier (BetaML.jl)\nLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nLGBMClassifier (LightGBM.jl)\nLinearBinaryClassifier (GLM.jl/MLJGLMInterface.jl)\nLinearSVC (LIBSVM.jl/MLJLIBSVMInterface.jl)\nLogisticCVClassifier (MLJScikitLearnInterface.jl)\nLogisticClassifier (MLJLinearModels.jl)\nLogisticClassifier (MLJScikitLearnInterface.jl)\nMultinomialClassifier (MLJLinearModels.jl)\nMultinomialNBClassifier (MLJScikitLearnInterface.jl)\nMultinomialNBClassifier (NaiveBayes.jl/MLJNaiveBayesInterface.jl)\nMultitargetKNNClassifier (NearestNeighborModels.jl)\nNeuralNetworkClassifier (BetaML.jl)\nNeuralNetworkClassifier (MLJFlux.jl)\nNuSVC (LIBSVM.jl/MLJLIBSVMInterface.jl)\nOneRuleClassifier (OneRule.jl)\nPassiveAggressiveClassifier (MLJScikitLearnInterface.jl)\nPegasosClassifier (BetaML.jl)\nPerceptronClassifier (BetaML.jl)\nPerceptronClassifier (MLJScikitLearnInterface.jl)\nProbabilisticNuSVC (LIBSVM.jl/MLJLIBSVMInterface.jl)\nProbabilisticSGDClassifier (MLJScikitLearnInterface.jl)\nProbabilisticSVC (LIBSVM.jl/MLJLIBSVMInterface.jl)\nRandomForestClassifier (BetaML.jl)\nRandomForestClassifier (DecisionTree.jl/MLJDecisionTreeInterface.jl)\nRandomForestClassifier (MLJScikitLearnInterface.jl)\nRidgeCVClassifier (MLJScikitLearnInterface.jl)\nRidgeCVRegressor (MLJScikitLearnInterface.jl)\nRidgeClassifier (MLJScikitLearnInterface.jl)\nSGDClassifier (MLJScikitLearnInterface.jl)\nSVC (LIBSVM.jl/MLJLIBSVMInterface.jl)\nSVMClassifier (MLJScikitLearnInterface.jl)\nSVMLinearClassifier (MLJScikitLearnInterface.jl)\nSVMNuClassifier (MLJScikitLearnInterface.jl)\nStableForestClassifier (SIRUS.jl)\nStableRulesClassifier (SIRUS.jl)\nSubspaceLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nXGBoostClassifier (XGBoost.jl/MLJXGBoostInterface.jl)","category":"page"},{"location":"model_browser/#Outlier-Detection","page":"Model Browser","title":"Outlier Detection","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"ABODDetector (OutlierDetectionNeighbors.jl)\nABODDetector (OutlierDetectionPython.jl)\nCBLOFDetector (OutlierDetectionPython.jl)\nCDDetector (OutlierDetectionPython.jl)\nCOFDetector (OutlierDetectionNeighbors.jl)\nCOFDetector (OutlierDetectionPython.jl)\nCOPODDetector (OutlierDetectionPython.jl)\nDNNDetector (OutlierDetectionNeighbors.jl)\nECODDetector (OutlierDetectionPython.jl)\nGMMDetector 
(OutlierDetectionPython.jl)\nHBOSDetector (OutlierDetectionPython.jl)\nIForestDetector (OutlierDetectionPython.jl)\nINNEDetector (OutlierDetectionPython.jl)\nKDEDetector (OutlierDetectionPython.jl)\nKNNDetector (OutlierDetectionNeighbors.jl)\nKNNDetector (OutlierDetectionPython.jl)\nLMDDDetector (OutlierDetectionPython.jl)\nLOCIDetector (OutlierDetectionPython.jl)\nLODADetector (OutlierDetectionPython.jl)\nLOFDetector (OutlierDetectionNeighbors.jl)\nLOFDetector (OutlierDetectionPython.jl)\nMCDDetector (OutlierDetectionPython.jl)\nOCSVMDetector (OutlierDetectionPython.jl)\nOneClassSVM (LIBSVM.jl/MLJLIBSVMInterface.jl)\nPCADetector (OutlierDetectionPython.jl)\nRODDetector (OutlierDetectionPython.jl)\nSODDetector (OutlierDetectionPython.jl)\nSOSDetector (OutlierDetectionPython.jl)","category":"page"},{"location":"model_browser/#Iterative-Models","page":"Model Browser","title":"Iterative Models","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"CatBoostClassifier (CatBoost.jl)\nCatBoostRegressor (CatBoost.jl)\nEvoSplineRegressor (EvoLinear.jl)\nEvoTreeClassifier (EvoTrees.jl)\nEvoTreeCount (EvoTrees.jl)\nEvoTreeGaussian (EvoTrees.jl)\nEvoTreeMLE (EvoTrees.jl)\nEvoTreeRegressor (EvoTrees.jl)\nExtraTreesClassifier (MLJScikitLearnInterface.jl)\nExtraTreesRegressor (MLJScikitLearnInterface.jl)\nImageClassifier (MLJFlux.jl)\nLGBMClassifier (LightGBM.jl)\nLGBMRegressor (LightGBM.jl)\nMultitargetNeuralNetworkRegressor (MLJFlux.jl)\nNeuralNetworkClassifier (MLJFlux.jl)\nNeuralNetworkRegressor (MLJFlux.jl)\nPerceptronClassifier (BetaML.jl)\nPerceptronClassifier (MLJScikitLearnInterface.jl)\nRandomForestClassifier (BetaML.jl)\nRandomForestClassifier (DecisionTree.jl/MLJDecisionTreeInterface.jl)\nRandomForestClassifier (MLJScikitLearnInterface.jl)\nRandomForestImputer (BetaML.jl)\nRandomForestRegressor (BetaML.jl)\nRandomForestRegressor (DecisionTree.jl/MLJDecisionTreeInterface.jl)\nRandomForestRegressor (MLJScikitLearnInterface.jl)\nXGBoostClassifier (XGBoost.jl/MLJXGBoostInterface.jl)\nXGBoostCount (XGBoost.jl/MLJXGBoostInterface.jl)\nXGBoostRegressor (XGBoost.jl/MLJXGBoostInterface.jl)","category":"page"},{"location":"model_browser/#Ensemble-Models","page":"Model Browser","title":"Ensemble Models","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"BaggingClassifier (MLJScikitLearnInterface.jl)\nBaggingRegressor (MLJScikitLearnInterface.jl)\nCatBoostClassifier (CatBoost.jl)\nCatBoostRegressor (CatBoost.jl)\nEvoSplineRegressor (EvoLinear.jl)\nEvoTreeClassifier (EvoTrees.jl)\nEvoTreeCount (EvoTrees.jl)\nEvoTreeGaussian (EvoTrees.jl)\nEvoTreeMLE (EvoTrees.jl)\nEvoTreeRegressor (EvoTrees.jl)\nLGBMClassifier (LightGBM.jl)\nLGBMRegressor (LightGBM.jl)\nRandomForestClassifier (BetaML.jl)\nRandomForestClassifier (DecisionTree.jl/MLJDecisionTreeInterface.jl)\nRandomForestClassifier (MLJScikitLearnInterface.jl)\nRandomForestImputer (BetaML.jl)\nRandomForestRegressor (BetaML.jl)\nRandomForestRegressor (DecisionTree.jl/MLJDecisionTreeInterface.jl)\nRandomForestRegressor (MLJScikitLearnInterface.jl)\nXGBoostClassifier (XGBoost.jl/MLJXGBoostInterface.jl)\nXGBoostCount (XGBoost.jl/MLJXGBoostInterface.jl)\nXGBoostRegressor (XGBoost.jl/MLJXGBoostInterface.jl)","category":"page"},{"location":"model_browser/#Clustering","page":"Model Browser","title":"Clustering","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model 
Browser","text":"AffinityPropagation (MLJScikitLearnInterface.jl)\nAgglomerativeClustering (MLJScikitLearnInterface.jl)\nBirch (MLJScikitLearnInterface.jl)\nBisectingKMeans (MLJScikitLearnInterface.jl)\nDBSCAN (Clustering.jl/MLJClusteringInterface.jl)\nDBSCAN (MLJScikitLearnInterface.jl)\nFeatureAgglomeration (MLJScikitLearnInterface.jl)\nGaussianMixtureClusterer (BetaML.jl)\nHDBSCAN (MLJScikitLearnInterface.jl)\nHierarchicalClustering (Clustering.jl/MLJClusteringInterface.jl)\nKMeans (Clustering.jl/MLJClusteringInterface.jl)\nKMeans (MLJScikitLearnInterface.jl)\nKMeans (ParallelKMeans.jl)\nKMeansClusterer (BetaML.jl)\nKMedoids (Clustering.jl/MLJClusteringInterface.jl)\nKMedoidsClusterer (BetaML.jl)\nMeanShift (MLJScikitLearnInterface.jl)\nMiniBatchKMeans (MLJScikitLearnInterface.jl)\nOPTICS (MLJScikitLearnInterface.jl)\nSelfOrganizingMap (SelfOrganizingMaps.jl)\nSpectralClustering (MLJScikitLearnInterface.jl)","category":"page"},{"location":"model_browser/#Dimension-Reduction","page":"Model Browser","title":"Dimension Reduction","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"AutoEncoder (BetaML.jl)\nBayesianLDA (MLJScikitLearnInterface.jl)\nBayesianLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nBayesianQDA (MLJScikitLearnInterface.jl)\nBayesianSubspaceLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nBirch (MLJScikitLearnInterface.jl)\nBisectingKMeans (MLJScikitLearnInterface.jl)\nFactorAnalysis (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nFeatureSelector (MLJModels.jl)\nKMeans (Clustering.jl/MLJClusteringInterface.jl)\nKMeans (MLJScikitLearnInterface.jl)\nKMeans (ParallelKMeans.jl)\nKMedoids (Clustering.jl/MLJClusteringInterface.jl)\nKernelPCA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nMiniBatchKMeans (MLJScikitLearnInterface.jl)\nPCA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nPPCA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nSelfOrganizingMap (SelfOrganizingMaps.jl)\nSubspaceLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nTSVDTransformer (TSVD.jl/MLJTSVDInterface.jl)","category":"page"},{"location":"model_browser/#Bayesian-Models","page":"Model Browser","title":"Bayesian Models","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"ARDRegressor (MLJScikitLearnInterface.jl)\nBayesianLDA (MLJScikitLearnInterface.jl)\nBayesianLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nBayesianQDA (MLJScikitLearnInterface.jl)\nBayesianRidgeRegressor (MLJScikitLearnInterface.jl)\nBayesianSubspaceLDA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nBernoulliNBClassifier (MLJScikitLearnInterface.jl)\nComplementNBClassifier (MLJScikitLearnInterface.jl)\nGaussianNBClassifier (MLJScikitLearnInterface.jl)\nGaussianNBClassifier (NaiveBayes.jl/MLJNaiveBayesInterface.jl)\nGaussianProcessClassifier (MLJScikitLearnInterface.jl)\nGaussianProcessRegressor (MLJScikitLearnInterface.jl)\nMultinomialNBClassifier (MLJScikitLearnInterface.jl)\nMultinomialNBClassifier (NaiveBayes.jl/MLJNaiveBayesInterface.jl)","category":"page"},{"location":"model_browser/#Class-Imbalance","page":"Model Browser","title":"Class Imbalance","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"BorderlineSMOTE1 (Imbalance.jl)\nClusterUndersampler (Imbalance.jl)\nENNUndersampler 
(Imbalance.jl)\nROSE (Imbalance.jl)\nRandomOversampler (Imbalance.jl)\nRandomUndersampler (Imbalance.jl)\nRandomWalkOversampler (Imbalance.jl)\nSMOTE (Imbalance.jl)\nSMOTEN (Imbalance.jl)\nSMOTENC (Imbalance.jl)\nTomekUndersampler (Imbalance.jl)","category":"page"},{"location":"model_browser/#Encoders","page":"Model Browser","title":"Encoders","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"BM25Transformer (MLJText.jl)\nContinuousEncoder (MLJModels.jl)\nCountTransformer (MLJText.jl)\nICA (MultivariateStats.jl/MLJMultivariateStatsInterface.jl)\nOneHotEncoder (MLJModels.jl)\nStandardizer (MLJModels.jl)\nTfidfTransformer (MLJText.jl)\nUnivariateBoxCoxTransformer (MLJModels.jl)\nUnivariateDiscretizer (MLJModels.jl)\nUnivariateStandardizer (MLJModels.jl)\nUnivariateTimeTypeToContinuous (MLJModels.jl)","category":"page"},{"location":"model_browser/#Static-Models","page":"Model Browser","title":"Static Models","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"AgglomerativeClustering (MLJScikitLearnInterface.jl)\nDBSCAN (Clustering.jl/MLJClusteringInterface.jl)\nDBSCAN (MLJScikitLearnInterface.jl)\nFeatureAgglomeration (MLJScikitLearnInterface.jl)\nHDBSCAN (MLJScikitLearnInterface.jl)\nInteractionTransformer (MLJModels.jl)\nOPTICS (MLJScikitLearnInterface.jl)\nSpectralClustering (MLJScikitLearnInterface.jl)","category":"page"},{"location":"model_browser/#Missing-Value-Imputation","page":"Model Browser","title":"Missing Value Imputation","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"FillImputer (MLJModels.jl)\nGaussianMixtureImputer (BetaML.jl)\nGeneralImputer (BetaML.jl)\nRandomForestImputer (BetaML.jl)\nSimpleImputer (BetaML.jl)\nUnivariateFillImputer (MLJModels.jl)","category":"page"},{"location":"model_browser/#Distribution-Fitter","page":"Model Browser","title":"Distribution Fitter","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"GaussianMixtureClusterer (BetaML.jl)\nGaussianMixtureImputer (BetaML.jl)\nGaussianMixtureRegressor (BetaML.jl)\nMultitargetGaussianMixtureRegressor (BetaML.jl)","category":"page"},{"location":"model_browser/#Text-Analysis","page":"Model Browser","title":"Text Analysis","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"BM25Transformer (MLJText.jl)\nCountTransformer (MLJText.jl)\nTfidfTransformer (MLJText.jl)","category":"page"},{"location":"model_browser/#Image-Processing","page":"Model Browser","title":"Image Processing","text":"","category":"section"},{"location":"model_browser/","page":"Model Browser","title":"Model Browser","text":"ImageClassifier (MLJFlux.jl)","category":"page"},{"location":"linear_pipelines/#Linear-Pipelines","page":"Linear Pipelines","title":"Linear Pipelines","text":"","category":"section"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"In MLJ a pipeline is a composite model in which models are chained together in a linear (non-branching) chain. 
For other arrangements, including custom architectures via learning networks, see Composing Models.","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"For purposes of illustration, consider a supervised learning problem with the following toy data:","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"using MLJ\nMLJ.color_off()","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"using MLJ\nX = (age = [23, 45, 34, 25, 67],\n gender = categorical(['m', 'm', 'f', 'm', 'f']));\ny = [67.0, 81.5, 55.6, 90.0, 61.1]\n nothing # hide","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"We would like to train using a K-nearest neighbor model, but the model type KNNRegressor assumes the features are all Continuous. This can be fixed by first:","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"coercing the :age feature to have Continuous type by replacing X with coerce(X, :age=>Continuous)\nstandardizing continuous features and one-hot encoding the Multiclass features using the ContinuousEncoder model","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"However, we can avoid separately applying these preprocessing steps (two of which require fit! steps) by combining them with the supervised KNNRegressor model in a new pipeline model, using Julia's |> syntax:","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"KNNRegressor = @load KNNRegressor pkg=NearestNeighborModels\npipe = (X -> coerce(X, :age=>Continuous)) |> ContinuousEncoder() |> KNNRegressor(K=2)","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"We see above that pipe is a model whose hyperparameters are themselves other models or a function. (The names of these hyper-parameters are automatically generated. To specify your own names, use the explicit Pipeline constructor instead.)","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"The |> syntax can also be used to extend an existing pipeline or concatenate two existing pipelines. So, we could instead have defined:","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"pipe_transformer = (X -> coerce(X, :age=>Continuous)) |> ContinuousEncoder()\npipe = pipe_transformer |> KNNRegressor(K=2)","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"A pipeline is just a model like any other. 
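In particular, it can be bound to data in a machine, trained, and used for prediction in the usual way. Here is a minimal sketch to illustrate this (not part of the original manual example; it uses only the pipe, X, and y defined above):

mach = machine(pipe, X, y)
fit!(mach)
predict(mach, X)   # predictions of the trained pipeline on the training table
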
For example, we can evaluate its performance on the data above:","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"evaluate(pipe, X, y, resampling=CV(nfolds=3), measure=mae)","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"To include target transformations in a pipeline, wrap the supervised component using TransformedTargetModel.","category":"page"},{"location":"linear_pipelines/","page":"Linear Pipelines","title":"Linear Pipelines","text":"Pipeline","category":"page"},{"location":"linear_pipelines/#MLJBase.Pipeline","page":"Linear Pipelines","title":"MLJBase.Pipeline","text":"Pipeline(component1, component2, ... , componentk; options...)\nPipeline(name1=component1, name2=component2, ..., namek=componentk; options...)\ncomponent1 |> component2 |> ... |> componentk\n\nCreate an instance of a composite model type which sequentially composes the specified components in order. This means component1 receives inputs, whose output is passed to component2, and so forth. A \"component\" is either a Model instance, a model type (converted immediately to its default instance) or any callable object. Here the \"output\" of a model is what predict returns if it is Supervised, or what transform returns if it is Unsupervised.\n\nNames for the component fields are automatically generated unless explicitly specified, as in\n\nPipeline(encoder=ContinuousEncoder(drop_last=false),\n stand=Standardizer())\n\nThe Pipeline constructor accepts keyword options discussed further below.\n\nOrdinary functions (and other callables) may be inserted in the pipeline as shown in the following example:\n\nPipeline(X->coerce(X, :age=>Continuous), OneHotEncoder, ConstantClassifier)\n\nSyntactic sugar\n\nThe |> operator is overloaded to construct pipelines out of models, callables, and existing pipelines:\n\nLinearRegressor = @load LinearRegressor pkg=MLJLinearModels add=true\nPCA = @load PCA pkg=MultivariateStats add=true\n\npipe1 = MLJBase.table |> ContinuousEncoder |> Standardizer\npipe2 = PCA |> LinearRegressor\npipe1 |> pipe2\n\nAt most one of the components may be a supervised model, but this model can appear in any position. A pipeline with a Supervised component is itself Supervised and implements the predict operation. It is otherwise Unsupervised (possibly Static) and implements transform.\n\nSpecial operations\n\nIf all the components are invertible unsupervised models (ie, implement inverse_transform) then inverse_transform is implemented for the pipeline. If there are no supervised models, then predict is nevertheless implemented, assuming the last component is a model that implements it (some clustering models). 
Similarly, calling transform on a supervised pipeline calls transform on the supervised component.\n\nOptional key-word arguments\n\nprediction_type - prediction type of the pipeline; possible values: :deterministic, :probabilistic, :interval (default=:deterministic if not inferable)\noperation - operation applied to the supervised component model, when present; possible values: predict, predict_mean, predict_median, predict_mode (default=predict)\ncache - whether the internal machines created for component models should cache model-specific representations of data (see machine) (default=true)\n\nwarning: Warning\nSet cache=false to guarantee data anonymization.\n\nTo build more complicated non-branching pipelines, refer to the MLJ manual sections on composing models.\n\n\n\n\n\n","category":"function"},{"location":"models/InteractionTransformer_MLJModels/#InteractionTransformer_MLJModels","page":"InteractionTransformer","title":"InteractionTransformer","text":"","category":"section"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"InteractionTransformer","category":"page"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"A model type for constructing a interaction transformer, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"InteractionTransformer = @load InteractionTransformer pkg=MLJModels","category":"page"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"Do model = InteractionTransformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in InteractionTransformer(order=...).","category":"page"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"Generates all polynomial interaction terms up to the given order for the subset of chosen columns. Any column that contains elements with scitype <:Infinite is a valid basis to generate interactions. 
If features is not specified, all such columns with scitype <:Infinite in the table are used as a basis.","category":"page"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"In MLJ or MLJBase, you can transform features X with the single call","category":"page"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"transform(machine(model), X)","category":"page"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"See also the example below.","category":"page"},{"location":"models/InteractionTransformer_MLJModels/#Hyper-parameters","page":"InteractionTransformer","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"order: Maximum order of interactions to be generated.\nfeatures: Restricts interations generation to those columns","category":"page"},{"location":"models/InteractionTransformer_MLJModels/#Operations","page":"InteractionTransformer","title":"Operations","text":"","category":"section"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"transform(machine(model), X): Generates polynomial interaction terms out of table X using the hyper-parameters specified in model.","category":"page"},{"location":"models/InteractionTransformer_MLJModels/#Example","page":"InteractionTransformer","title":"Example","text":"","category":"section"},{"location":"models/InteractionTransformer_MLJModels/","page":"InteractionTransformer","title":"InteractionTransformer","text":"using MLJ\n\nX = (\n A = [1, 2, 3],\n B = [4, 5, 6],\n C = [7, 8, 9],\n D = [\"x₁\", \"x₂\", \"x₃\"]\n)\nit = InteractionTransformer(order=3)\nmach = machine(it)\n\njulia> transform(mach, X)\n(A = [1, 2, 3],\n B = [4, 5, 6],\n C = [7, 8, 9],\n D = [\"x₁\", \"x₂\", \"x₃\"],\n A_B = [4, 10, 18],\n A_C = [7, 16, 27],\n B_C = [28, 40, 54],\n A_B_C = [28, 80, 162],)\n\nit = InteractionTransformer(order=2, features=[:A, :B])\nmach = machine(it)\n\njulia> transform(mach, X)\n(A = [1, 2, 3],\n B = [4, 5, 6],\n C = [7, 8, 9],\n D = [\"x₁\", \"x₂\", \"x₃\"],\n A_B = [4, 10, 18],)\n","category":"page"},{"location":"models/HierarchicalClustering_Clustering/#HierarchicalClustering_Clustering","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"","category":"section"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"HierarchicalClustering","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"A model type for constructing a hierarchical clusterer, based on Clustering.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"HierarchicalClustering = @load HierarchicalClustering 
pkg=Clustering","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"Do model = HierarchicalClustering() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in HierarchicalClustering(linkage=...).","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"Hierarchical Clustering is a clustering algorithm that organizes the data in a dendrogram based on distances between groups of points and computes cluster assignments by cutting the dendrogram at a given height. More information is available at the Clustering.jl documentation. Use predict to get cluster assignments. The dendrogram and the dendrogram cutter are accessed from the machine report (see below).","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"This is a static implementation, i.e., it does not generalize to new data instances, and there is no training data. For clusterers that do generalize, see KMeans or KMedoids.","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"In MLJ or MLJBase, create a machine with","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"mach = machine(model)","category":"page"},{"location":"models/HierarchicalClustering_Clustering/#Hyper-parameters","page":"HierarchicalClustering","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"linkage = :single: linkage method (:single, :average, :complete, :ward, :ward_presquared)\nmetric = SqEuclidean: metric (see Distances.jl for available metrics)\nbranchorder = :r: branchorder (:r, :barjoseph, :optimal)\nh = nothing: height at which the dendrogram is cut\nk = 3: number of clusters.","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"If both k and h are specified, it is guaranteed that the number of clusters is not less than k and their height is not above h.","category":"page"},{"location":"models/HierarchicalClustering_Clustering/#Operations","page":"HierarchicalClustering","title":"Operations","text":"","category":"section"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"predict(mach, X): return cluster label assignments, as an unordered CategoricalVector. 
Here X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).","category":"page"},{"location":"models/HierarchicalClustering_Clustering/#Report","page":"HierarchicalClustering","title":"Report","text":"","category":"section"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"After calling predict(mach), the fields of report(mach) are:","category":"page"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"dendrogram: the dendrogram that was computed when calling predict.\ncutter: a dendrogram cutter that can be called with a height h or a number of clusters k, to obtain a new assignment of the data points to clusters (see example below).","category":"page"},{"location":"models/HierarchicalClustering_Clustering/#Examples","page":"HierarchicalClustering","title":"Examples","text":"","category":"section"},{"location":"models/HierarchicalClustering_Clustering/","page":"HierarchicalClustering","title":"HierarchicalClustering","text":"using MLJ\n\nX, labels = make_moons(400, noise=0.09, rng=1) ## synthetic data with 2 clusters; X\n\nHierarchicalClustering = @load HierarchicalClustering pkg=Clustering\nmodel = HierarchicalClustering(linkage = :complete)\nmach = machine(model)\n\n## compute and output cluster assignments for observations in `X`:\nyhat = predict(mach, X)\n\n## plot dendrogram:\nusing StatsPlots\nplot(report(mach).dendrogram)\n\n## make new predictions by cutting the dendrogram at another height\nreport(mach).cutter(h = 2.5)","category":"page"},{"location":"models/SMOTENC_Imbalance/#SMOTENC_Imbalance","page":"SMOTENC","title":"SMOTENC","text":"","category":"section"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"Initiate a SMOTENC model with the given hyper-parameters.","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"SMOTENC","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"A model type for constructing a smotenc, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"SMOTENC = @load SMOTENC pkg=Imbalance","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"Do model = SMOTENC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SMOTENC(k=...).","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"SMOTENC implements the SMOTENC algorithm to correct for class imbalance as in N. V. Chawla, K. W. Bowyer, L. O.Hall, W. P. 
Kegelmeyer, “SMOTE: synthetic minority over-sampling technique,” Journal of artificial intelligence research, 321-357, 2002.","category":"page"},{"location":"models/SMOTENC_Imbalance/#Training-data","page":"SMOTENC","title":"Training data","text":"","category":"section"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"In MLJ or MLJBase, wrap the model in a machine by","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"mach = machine(model)","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"Likewise, there is no need to fit!(mach).","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"For default values of the hyper-parameters, model can be constructed by","category":"page"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"model = SMOTENC()","category":"page"},{"location":"models/SMOTENC_Imbalance/#Hyperparameters","page":"SMOTENC","title":"Hyperparameters","text":"","category":"section"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"k=5: Number of nearest neighbors to consider in the SMOTENC algorithm. Should be within the range [1, n - 1], where n is the number of observations; otherwise set to the nearest of these two values.\nratios=1.0: A parameter that controls the amount of oversampling to be done for each class\nCan be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class\nCan be a dictionary mapping each class label to the float ratio for that class\nknn_tree: Decides the tree used in KNN computations. Either \"Brute\" or \"Ball\". BallTree can be much faster but may lead to inaccurate results.\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister`.","category":"page"},{"location":"models/SMOTENC_Imbalance/#Transform-Inputs","page":"SMOTENC","title":"Transform Inputs","text":"","category":"section"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"X: A table with element scitypes that subtype Union{Finite, Infinite}. Elements in nominal columns should subtype Finite (i.e., have scitype OrderedFactor or Multiclass) and elements in continuous columns should subtype Infinite (i.e., have scitype Count or Continuous).\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/SMOTENC_Imbalance/#Transform-Outputs","page":"SMOTENC","title":"Transform Outputs","text":"","category":"section"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"Xover: A matrix or table that includes original data and the new observations due to oversampling. 
depending on whether the input X is a matrix or table respectively\nyover: An abstract vector of labels corresponding to Xover","category":"page"},{"location":"models/SMOTENC_Imbalance/#Operations","page":"SMOTENC","title":"Operations","text":"","category":"section"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"transform(mach, X, y): resample the data X and y using SMOTENC, returning both the new and original observations","category":"page"},{"location":"models/SMOTENC_Imbalance/#Example","page":"SMOTENC","title":"Example","text":"","category":"section"},{"location":"models/SMOTENC_Imbalance/","page":"SMOTENC","title":"SMOTENC","text":"using MLJ\nusing ScientificTypes\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows = 100\nnum_continuous_feats = 3\n## want two categorical features with three and two possible values respectively\nnum_vals_per_category = [3, 2]\n\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n class_probs, num_vals_per_category, rng=42) \njulia> Imbalance.checkbalance(y)\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) \n\njulia> ScientificTypes.schema(X).scitypes\n(Continuous, Continuous, Continuous, Continuous, Continuous)\n## coerce nominal columns to a finite scitype (multiclass or ordered factor)\nX = coerce(X, :Column4=>Multiclass, :Column5=>Multiclass)\n\n## load SMOTE-NC\nSMOTENC = @load SMOTENC pkg=Imbalance\n\n## wrap the model in a machine\noversampler = SMOTENC(k=5, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)\nmach = machine(oversampler)\n\n## provide the data to transform (there is nothing to fit)\nXover, yover = transform(mach, X, y)\n\njulia> Imbalance.checkbalance(yover)\n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) ","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#EvoTreeCount_EvoTrees","page":"EvoTreeCount","title":"EvoTreeCount","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"EvoTreeCount(;kwargs...)","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"A model type for constructing a EvoTreeCount, based on EvoTrees.jl, and implementing both an internal API the MLJ model interface. EvoTreeCount is used to perform Poisson probabilistic regression on count target.","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#Hyper-parameters","page":"EvoTreeCount","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"nrounds=100: Number of rounds. It corresponds to the number of trees that will be sequentially stacked. Must be >= 1.\neta=0.1: Learning rate. Each tree raw predictions are scaled by eta prior to be added to the stack of predictions. Must be > 0. A lower eta results in slower learning, requiring a higher nrounds but typically improves model performance.\nL2::T=0.0: L2 regularization factor on aggregate gain. Must be >= 0. Higher L2 can result in a more robust model.\nlambda::T=0.0: L2 regularization factor on individual gain. Must be >= 0. 
Higher lambda can result in a more robust model.\ngamma::T=0.0: Minimum gain improvement needed to perform a node split. Higher gamma can result in a more robust model.\nmax_depth=6: Maximum depth of a tree. Must be >= 1. A tree of depth 1 is made of a single prediction leaf. A complete tree of depth N contains 2^(N - 1) terminal leaves and 2^(N - 1) - 1 split nodes. Compute cost is proportional to 2^max_depth. Typical optimal values are in the 3 to 9 range.\nmin_weight=1.0: Minimum weight needed in a node to perform a split. Matches the number of observations by default or the sum of weights as provided by the weights vector. Must be > 0.\nrowsample=1.0: Proportion of rows that are sampled at each iteration to build the tree. Should be ]0, 1].\ncolsample=1.0: Proportion of columns / features that are sampled at each iteration to build the tree. Should be ]0, 1].\nnbins=64: Number of bins into which each feature is quantized. Buckets are defined based on quantiles, hence resulting in equal weight bins. Should be between 2 and 255.\nmonotone_constraints=Dict{Int, Int}(): Specify monotonic constraints using a dict where the key is the feature index and the value is the applicable constraint (-1=decreasing, 0=none, 1=increasing).\ntree_type=\"binary\": Tree structure to be used. One of:\nbinary: Each node of a tree is grown independently. Trees are built depthwise until max depth is reached or until min weight or gain (see gamma) stops further node splits.\noblivious: A common splitting condition is imposed on all nodes of a given depth.\nrng=123: Either an integer used as a seed to the random number generator or an actual random number generator (::Random.AbstractRNG).","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#Internal-API","page":"EvoTreeCount","title":"Internal API","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"Do config = EvoTreeCount() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeCount(max_depth=...).","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#Training-model","page":"EvoTreeCount","title":"Training model","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"A model is built using fit_evotree:","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"model = fit_evotree(config; x_train, y_train, kwargs...)","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#Inference","page":"EvoTreeCount","title":"Inference","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"Predictions are obtained using predict which returns a Vector of length nobs:","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"EvoTrees.predict(model, X)","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"Alternatively, models act as a functor, returning predictions when called as a function with features as argument:","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"model(X)","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#MLJ","page":"EvoTreeCount","title":"MLJ","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"From MLJ, the type can be imported using:","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"EvoTreeCount = @load EvoTreeCount pkg=EvoTrees","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"Do model = EvoTreeCount() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in EvoTreeCount(loss=...).","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#Training-data","page":"EvoTreeCount","title":"Training data","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:Count; check the scitype with scitype(y)","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#Operations","page":"EvoTreeCount","title":"Operations","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"predict(mach, Xnew): returns a vector of Poisson distributions given features Xnew having the same scitype as X above. 
Predictions are probabilistic.","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"Specific metrics can also be predicted using:","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"predict_mean(mach, Xnew)\npredict_mode(mach, Xnew)\npredict_median(mach, Xnew)","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#Fitted-parameters","page":"EvoTreeCount","title":"Fitted parameters","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":":fitresult: The GBTree object returned by the EvoTrees.jl fitting algorithm.","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#Report","page":"EvoTreeCount","title":"Report","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"The fields of report(mach) are:","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":":features: The names of the features encountered in training.","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/#Examples","page":"EvoTreeCount","title":"Examples","text":"","category":"section"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"## Internal API\nusing EvoTrees\nconfig = EvoTreeCount(max_depth=5, nbins=32, nrounds=100)\nnobs, nfeats = 1_000, 5\nx_train, y_train = randn(nobs, nfeats), rand(0:2, nobs)\nmodel = fit_evotree(config; x_train, y_train)\npreds = EvoTrees.predict(model, x_train)","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"using MLJ\nEvoTreeCount = @load EvoTreeCount pkg=EvoTrees\nmodel = EvoTreeCount(max_depth=5, nbins=32, nrounds=100)\nnobs, nfeats = 1_000, 5\nX, y = randn(nobs, nfeats), rand(0:2, nobs)\nmach = machine(model, X, y) |> fit!\npreds = predict(mach, X)\npreds = predict_mean(mach, X)\npreds = predict_mode(mach, X)\npreds = predict_median(mach, X)\n","category":"page"},{"location":"models/EvoTreeCount_EvoTrees/","page":"EvoTreeCount","title":"EvoTreeCount","text":"See also EvoTrees.jl.","category":"page"},{"location":"list_of_supported_models/#model_list","page":"List of Supported Models","title":"List of Supported Models","text":"","category":"section"},{"location":"list_of_supported_models/","page":"List of Supported Models","title":"List of Supported Models","text":"For a list of models organized around function (\"classification\", \"regression\", etc.), see the Model Browser.","category":"page"},{"location":"list_of_supported_models/","page":"List of Supported Models","title":"List of Supported Models","text":"MLJ provides access to a wide variety of machine learning models. We are always looking for help adding new models or testing existing ones. Currently available models are listed below; for the most up-to-date list, run using MLJ; models(). ","category":"page"},{"location":"list_of_supported_models/","page":"List of Supported Models","title":"List of Supported Models","text":"Indications of \"maturity\" in the table below are approximate, subjective, and possibly out-of-date. 
A decision to use or not use a model in a critical application should be based on a user's independent assessment.","category":"page"},{"location":"list_of_supported_models/","page":"List of Supported Models","title":"List of Supported Models","text":"experimental: indicates the package is fairly new and/or is under active development; you can help by testing these packages and making them more robust,\nlow: indicates a package that has reached a roughly stable form in terms of interface and which is unlikely to contain serious bugs. It may be missing some functionality found in similar packages. It has not benefited from a high level of use.\nmedium: indicates the package is fairly mature but may benefit from optimizations and/or extra features; you can help by suggesting either,\nhigh: indicates the package is very mature and functionalities are expected to have been fairly optimised and tested.","category":"page"},{"location":"list_of_supported_models/","page":"List of Supported Models","title":"List of Supported Models","text":"Package Interface Pkg Models Maturity Note\nBetaML.jl - DecisionTreeClassifier, RandomForestClassifier, NeuralNetworkClassifier, PerceptronClassifier, KernelPerceptronClassifier, PegasosClassifier, DecisionTreeRegressor, RandomForestRegressor, NeuralNetworkRegressor, MultitargetNeuralNetworkRegressor, GaussianMixtureRegressor, MultitargetGaussianMixtureRegressor, KMeansClusterer, KMedoidsClusterer, GaussianMixtureClusterer, SimpleImputer, GaussianMixtureImputer, RandomForestImputer, GeneralImputer, AutoEncoder medium \nCatBoost.jl - CatBoostRegressor, CatBoostClassifier high \nClustering.jl MLJClusteringInterface.jl KMeans, KMedoids, DBSCAN, HierarchicalClustering high² \nDecisionTree.jl MLJDecisionTreeInterface.jl DecisionTreeClassifier, DecisionTreeRegressor, AdaBoostStumpClassifier, RandomForestClassifier, RandomForestRegressor high \nEvoTrees.jl - EvoTreeRegressor, EvoTreeClassifier, EvoTreeCount, EvoTreeGaussian, EvoTreeMLE medium tree-based gradient boosting models\nEvoLinear.jl - EvoLinearRegressor medium linear boosting models\nGLM.jl MLJGLMInterface.jl LinearRegressor, LinearBinaryClassifier, LinearCountRegressor medium² \nImbalance.jl - RandomOversampler, RandomWalkOversampler, ROSE, SMOTE, BorderlineSMOTE1, SMOTEN, SMOTENC, RandomUndersampler, ClusterUndersampler, ENNUndersampler, TomekUndersampler low \nLIBSVM.jl MLJLIBSVMInterface.jl LinearSVC, SVC, NuSVC, NuSVR, EpsilonSVR, OneClassSVM high also via ScikitLearn.jl\nLightGBM.jl - LGBMClassifier, LGBMRegressor high \nFlux.jl MLJFlux.jl NeuralNetworkRegressor, NeuralNetworkClassifier, MultitargetNeuralNetworkRegressor, ImageClassifier low \nMLJBalancing.jl - BalancedBaggingClassifier low \nMLJLinearModels.jl - LinearRegressor, RidgeRegressor, LassoRegressor, ElasticNetRegressor, QuantileRegressor, HuberRegressor, RobustRegressor, LADRegressor, LogisticClassifier, MultinomialClassifier medium \nMLJModels.jl (built-in) - ConstantClassifier, ConstantRegressor, ContinuousEncoder, DeterministicConstantClassifier, DeterministicConstantRegressor, FeatureSelector, FillImputer, InteractionTransformer, OneHotEncoder, Standardizer, UnivariateBoxCoxTransformer, UnivariateDiscretizer, UnivariateFillImputer, UnivariateTimeTypeToContinuous, BinaryThreshholdPredictor medium \nMLJText.jl - TfidfTransformer, BM25Transformer, CountTransformer low \nMultivariateStats.jl MLJMultivariateStatsInterface.jl LinearRegressor, MultitargetLinearRegressor, RidgeRegressor, MultitargetRidgeRegressor, PCA, KernelPCA, 
ICA, LDA, BayesianLDA, SubspaceLDA, BayesianSubspaceLDA, FactorAnalysis, PPCA high \nNaiveBayes.jl MLJNaiveBayesInterface.jl GaussianNBClassifier, MultinomialNBClassifier, HybridNBClassifier low \nNearestNeighborModels.jl - KNNClassifier, KNNRegressor, MultitargetKNNClassifier, MultitargetKNNRegressor high \nOneRule.jl - OneRuleClassifier experimental \nOutlierDetectionNeighbors.jl - ABODDetector, COFDetector, DNNDetector, KNNDetector, LOFDetector medium \nOutlierDetectionNetworks.jl - AEDetector, DSADDetector, ESADDetector medium \nOutlierDetectionPython.jl - ABODDetector, CBLOFDetector, CDDetector, COFDetector, COPODDetector, ECODDetector, GMMDetector, HBOSDetector, IForestDetector, INNEDetector, KDEDetector, KNNDetector, LMDDDetector, LOCIDetector, LODADetector, LOFDetector, MCDDetector, OCSVMDetector, PCADetector, RODDetector, SODDetector, SOSDetector high \nParallelKMeans.jl - KMeans experimental \nPartialLeastSquaresRegressor.jl - PLSRegressor, KPLSRegressor experimental \nScikitLearn.jl MLJScikitLearnInterface.jl ARDRegressor, AdaBoostClassifier, AdaBoostRegressor, AffinityPropagation, AgglomerativeClustering, BaggingClassifier, BaggingRegressor, BayesianLDA, BayesianQDA, BayesianRidgeRegressor, BernoulliNBClassifier, Birch, ComplementNBClassifier, DBSCAN, DummyClassifier, DummyRegressor, ElasticNetCVRegressor, ElasticNetRegressor, ExtraTreesClassifier, ExtraTreesRegressor, FeatureAgglomeration, GaussianNBClassifier, GaussianProcessClassifier, GaussianProcessRegressor, GradientBoostingClassifier, GradientBoostingRegressor, HuberRegressor, KMeans, KNeighborsClassifier, KNeighborsRegressor, LarsCVRegressor, LarsRegressor, LassoCVRegressor, LassoLarsCVRegressor, LassoLarsICRegressor, LassoLarsRegressor, LassoRegressor, LinearRegressor, LogisticCVClassifier, LogisticClassifier, MeanShift, MiniBatchKMeans, MultiTaskElasticNetCVRegressor, MultiTaskElasticNetRegressor, MultiTaskLassoCVRegressor, MultiTaskLassoRegressor, MultinomialNBClassifier, OPTICS, OrthogonalMatchingPursuitCVRegressor, OrthogonalMatchingPursuitRegressor, PassiveAggressiveClassifier, PassiveAggressiveRegressor, PerceptronClassifier, ProbabilisticSGDClassifier, RANSACRegressor, RandomForestClassifier, RandomForestRegressor, RidgeCVClassifier, RidgeCVRegressor, RidgeClassifier, RidgeRegressor, SGDClassifier, SGDRegressor, SVMClassifier, SVMLClassifier, SVMLRegressor, SVMNuClassifier, SVMNuRegressor, SVMRegressor, SpectralClustering, TheilSenRegressor high² \nSIRUS.jl - StableForestClassifier, StableForestRegressor, StableRulesClassifier, StableRulesRegressor low \nSymbolicRegression.jl - MultitargetSRRegressor, SRRegressor experimental \nTSVD.jl MLJTSVDInterface.jl TSVDTransformer high \nXGBoost.jl MLJXGBoostInterface.jl XGBoostRegressor, XGBoostClassifier, XGBoostCount high ","category":"page"},{"location":"list_of_supported_models/","page":"List of Supported Models","title":"List of Supported Models","text":"Notes ","category":"page"},{"location":"list_of_supported_models/","page":"List of Supported Models","title":"List of Supported Models","text":"¹Models not in the MLJ registry are not included in integration tests. Consult package documentation to see how to load them. There may be issues loading these models simultaneously with other registered models.","category":"page"},{"location":"list_of_supported_models/","page":"List of Supported Models","title":"List of Supported Models","text":"²Some models are missing and assistance is welcome to complete the interface. 
Post a message on the Julia #mlj Slack channel if you would like to help, thanks!","category":"page"},{"location":"models/GaussianProcessClassifier_MLJScikitLearnInterface/#GaussianProcessClassifier_MLJScikitLearnInterface","page":"GaussianProcessClassifier","title":"GaussianProcessClassifier","text":"","category":"section"},{"location":"models/GaussianProcessClassifier_MLJScikitLearnInterface/","page":"GaussianProcessClassifier","title":"GaussianProcessClassifier","text":"GaussianProcessClassifier","category":"page"},{"location":"models/GaussianProcessClassifier_MLJScikitLearnInterface/","page":"GaussianProcessClassifier","title":"GaussianProcessClassifier","text":"A model type for constructing a Gaussian process classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/GaussianProcessClassifier_MLJScikitLearnInterface/","page":"GaussianProcessClassifier","title":"GaussianProcessClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/GaussianProcessClassifier_MLJScikitLearnInterface/","page":"GaussianProcessClassifier","title":"GaussianProcessClassifier","text":"GaussianProcessClassifier = @load GaussianProcessClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/GaussianProcessClassifier_MLJScikitLearnInterface/","page":"GaussianProcessClassifier","title":"GaussianProcessClassifier","text":"Do model = GaussianProcessClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in GaussianProcessClassifier(kernel=...).","category":"page"},{"location":"models/GaussianProcessClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"GaussianProcessClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/GaussianProcessClassifier_MLJScikitLearnInterface/","page":"GaussianProcessClassifier","title":"GaussianProcessClassifier","text":"kernel = nothing\noptimizer = fmin_l_bfgs_b\nn_restarts_optimizer = 0\ncopy_X_train = true\nrandom_state = nothing\nmax_iter_predict = 100\nwarm_start = false\nmulti_class = one_vs_rest","category":"page"},{"location":"models/SpectralClustering_MLJScikitLearnInterface/#SpectralClustering_MLJScikitLearnInterface","page":"SpectralClustering","title":"SpectralClustering","text":"","category":"section"},{"location":"models/SpectralClustering_MLJScikitLearnInterface/","page":"SpectralClustering","title":"SpectralClustering","text":"SpectralClustering","category":"page"},{"location":"models/SpectralClustering_MLJScikitLearnInterface/","page":"SpectralClustering","title":"SpectralClustering","text":"A model type for constructing a spectral clustering, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SpectralClustering_MLJScikitLearnInterface/","page":"SpectralClustering","title":"SpectralClustering","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SpectralClustering_MLJScikitLearnInterface/","page":"SpectralClustering","title":"SpectralClustering","text":"SpectralClustering = @load SpectralClustering pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/SpectralClustering_MLJScikitLearnInterface/","page":"SpectralClustering","title":"SpectralClustering","text":"Do model = SpectralClustering() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in SpectralClustering(n_clusters=...).","category":"page"},{"location":"models/SpectralClustering_MLJScikitLearnInterface/","page":"SpectralClustering","title":"SpectralClustering","text":"Apply clustering to a projection of the normalized Laplacian. In practice spectral clustering is very useful when the structure of the individual clusters is highly non-convex or more generally when a measure of the center and spread of the cluster is not a suitable description of the complete cluster. For instance when clusters are nested circles on the 2D plane.","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/#ElasticNetRegressor_MLJLinearModels","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"","category":"section"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"ElasticNetRegressor","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"A model type for constructing a elastic net regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"ElasticNetRegressor = @load ElasticNetRegressor pkg=MLJLinearModels","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"Do model = ElasticNetRegressor() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"Elastic net is a linear model with objective function","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"$","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"|Xθ - y|₂²/2 + n⋅λ|θ|₂²/2 + n⋅γ|θ|₁ $","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"where n is the number of observations.","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"If scale_penalty_with_samples = false the objective function is instead","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"$","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"|Xθ - y|₂²/2 + λ|θ|₂²/2 + γ|θ|₁ $","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":".","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"Different solver options exist, as indicated under \"Hyperparameters\" below. 
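For example, a minimal sketch (the particular lambda, gamma and solver choices here are illustrative only, not recommendations):\n\nusing MLJ\nimport MLJLinearModels\nElasticNetRegressor = @load ElasticNetRegressor pkg=MLJLinearModels\n\n## lambda scales the L2 term and gamma the L1 term in the objective above;\n## FISTA() is the accelerated proximal-gradient solver alias noted under \"Hyperparameters\"\nmodel = ElasticNetRegressor(lambda=0.5, gamma=0.3, solver=MLJLinearModels.FISTA())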
","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/#Training-data","page":"ElasticNetRegressor","title":"Training data","text":"","category":"section"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"where:","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/#Hyperparameters","page":"ElasticNetRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"lambda::Real: strength of the L2 regularization. Default: 1.0\ngamma::Real: strength of the L1 regularization. Default: 0.0\nfit_intercept::Bool: whether to fit the intercept or not. Default: true\npenalize_intercept::Bool: whether to penalize the intercept. Default: false\nscale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: any instance of MLJLinearModels.ProxGrad.\nIf solver=nothing (default) then ProxGrad(accel=true) (FISTA) is used.\nSolver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...). 
Default: nothing","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/#Example","page":"ElasticNetRegressor","title":"Example","text":"","category":"section"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"using MLJ\nX, y = make_regression()\nmach = fit!(machine(ElasticNetRegressor(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"},{"location":"models/ElasticNetRegressor_MLJLinearModels/","page":"ElasticNetRegressor","title":"ElasticNetRegressor","text":"See also LassoRegressor.","category":"page"},{"location":"models/KMeans_Clustering/#KMeans_Clustering","page":"KMeans","title":"KMeans","text":"","category":"section"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"KMeans","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"A model type for constructing a K-means clusterer, based on Clustering.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"KMeans = @load KMeans pkg=Clustering","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"Do model = KMeans() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KMeans(k=...).","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"K-means is a classical method for clustering or vector quantization. It produces a fixed number of clusters, each associated with a center (also known as a prototype), and each data point is assigned to a cluster with the nearest center.","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"From a mathematical standpoint, K-means is a coordinate descent algorithm that solves the following optimization problem:","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"$","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"\\text{minimize} \\ \\sum_{i=1}^n \\| \\mathbf{x}_i - \\boldsymbol{\\mu}_{z_i} \\|^2 \\ \\text{w.r.t.} \\ (\\boldsymbol{\\mu}, z) $","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"Here, μ_k is the center of the k-th cluster, and z_i is the index of the cluster assigned to the i-th point x_i.","category":"page"},{"location":"models/KMeans_Clustering/#Training-data","page":"KMeans","title":"Training data","text":"","category":"section"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"mach = machine(model, X)","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"Here:","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with 
schema(X).","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/KMeans_Clustering/#Hyper-parameters","page":"KMeans","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"k=3: The number of centroids to use in clustering.\nmetric::SemiMetric=Distances.SqEuclidean: The metric used to calculate the clustering. Must have type PreMetric from Distances.jl.\ninit = :kmpp: One of the following options to indicate how cluster seeds should be initialized:\n:kmpp: KMeans++\n:kmenc: K-medoids initialization based on centrality\n:rand: random\nan instance of Clustering.SeedingAlgorithm from Clustering.jl\nan integer vector of length k that provides the indices of points to use as initial cluster centers.\nSee documentation of Clustering.jl.","category":"page"},{"location":"models/KMeans_Clustering/#Operations","page":"KMeans","title":"Operations","text":"","category":"section"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"predict(mach, Xnew): return cluster label assignments, given new features Xnew having the same Scitype as X above.\ntransform(mach, Xnew): instead return the mean pairwise distances from new samples to the cluster centers.","category":"page"},{"location":"models/KMeans_Clustering/#Fitted-parameters","page":"KMeans","title":"Fitted parameters","text":"","category":"section"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"centers: The coordinates of the cluster centers.","category":"page"},{"location":"models/KMeans_Clustering/#Report","page":"KMeans","title":"Report","text":"","category":"section"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"The fields of report(mach) are:","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"assignments: The cluster assignments of each point in the training data.\ncluster_labels: The labels assigned to each cluster.","category":"page"},{"location":"models/KMeans_Clustering/#Examples","page":"KMeans","title":"Examples","text":"","category":"section"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"using MLJ\nKMeans = @load KMeans pkg=Clustering\n\ntable = load_iris()\ny, X = unpack(table, ==(:target), rng=123)\nmodel = KMeans(k=3)\nmach = machine(model, X) |> fit!\n\nyhat = predict(mach, X)\n@assert yhat == report(mach).assignments\n\ncompare = zip(yhat, y) |> collect;\ncompare[1:8] ## clusters align with classes\n\ncenter_dists = transform(mach, fitted_params(mach).centers')\n\n@assert center_dists[1][1] == 0.0\n@assert center_dists[2][2] == 0.0\n@assert center_dists[3][3] == 0.0","category":"page"},{"location":"models/KMeans_Clustering/","page":"KMeans","title":"KMeans","text":"See also 
KMedoids","category":"page"},{"location":"models/PassiveAggressiveClassifier_MLJScikitLearnInterface/#PassiveAggressiveClassifier_MLJScikitLearnInterface","page":"PassiveAggressiveClassifier","title":"PassiveAggressiveClassifier","text":"","category":"section"},{"location":"models/PassiveAggressiveClassifier_MLJScikitLearnInterface/","page":"PassiveAggressiveClassifier","title":"PassiveAggressiveClassifier","text":"PassiveAggressiveClassifier","category":"page"},{"location":"models/PassiveAggressiveClassifier_MLJScikitLearnInterface/","page":"PassiveAggressiveClassifier","title":"PassiveAggressiveClassifier","text":"A model type for constructing a passive aggressive classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/PassiveAggressiveClassifier_MLJScikitLearnInterface/","page":"PassiveAggressiveClassifier","title":"PassiveAggressiveClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/PassiveAggressiveClassifier_MLJScikitLearnInterface/","page":"PassiveAggressiveClassifier","title":"PassiveAggressiveClassifier","text":"PassiveAggressiveClassifier = @load PassiveAggressiveClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/PassiveAggressiveClassifier_MLJScikitLearnInterface/","page":"PassiveAggressiveClassifier","title":"PassiveAggressiveClassifier","text":"Do model = PassiveAggressiveClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in PassiveAggressiveClassifier(C=...).","category":"page"},{"location":"models/PassiveAggressiveClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"PassiveAggressiveClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/PassiveAggressiveClassifier_MLJScikitLearnInterface/","page":"PassiveAggressiveClassifier","title":"PassiveAggressiveClassifier","text":"C = 1.0\nfit_intercept = true\nmax_iter = 100\ntol = 0.001\nearly_stopping = false\nvalidation_fraction = 0.1\nn_iter_no_change = 5\nshuffle = true\nverbose = 0\nloss = hinge\nn_jobs = nothing\nrandom_state = 0\nwarm_start = false\nclass_weight = nothing\naverage = false","category":"page"},{"location":"tuning_models/#Tuning-Models","page":"Tuning Models","title":"Tuning Models","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"MLJ provides several built-in and third-party options for optimizing a model's hyper-parameters. The quick-reference table below omits some advanced keyword options.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"tuning strategy notes package to import package providing the core algorithm\nGrid(goal=nothing, resolution=10) shuffled by default; goal is upper bound for number of grid points MLJ.jl or MLJTuning.jl MLJTuning.jl\nRandomSearch(rng=GLOBAL_RNG) with customizable priors MLJ.jl or MLJTuning.jl MLJTuning.jl\nLatinHypercube(rng=GLOBAL_RNG) with discrete parameter support MLJ.jl or MLJTuning.jl LatinHypercubeSampling\nMLJTreeParzenTuning() See this example for usage TreeParzen.jl TreeParzen.jl (port to Julia of hyperopt)\nParticleSwarm(n_particles=3, rng=GLOBAL_RNG) Standard Kennedy-Eberhart algorithm, plus discrete parameter support MLJParticleSwarmOptimization.jl MLJParticleSwarmOptimization.jl\nAdaptiveParticleSwarm(n_particles=3, rng=GLOBAL_RNG) Zhan et al. 
variant with automated swarm coefficient updates, plus discrete parameter support MLJParticleSwarmOptimization.jl MLJParticleSwarmOptimization.jl\nExplicit() For an explicit list of models of varying type MLJ.jl or MLJTuning.jl MLJTuning.jl","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Below we illustrate hyperparameter optimization using the Grid, RandomSearch, LatinHypercube and Explicit tuning strategies.","category":"page"},{"location":"tuning_models/#Overview","page":"Tuning Models","title":"Overview","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"In MLJ model tuning is implemented as a model wrapper. After wrapping a model in a tuning strategy and binding the wrapped model to data in a machine called mach, calling fit!(mach) instigates a search for optimal model hyperparameters, within a specified range, and then uses all supplied data to train the best model. To predict using that model, one then calls predict(mach, Xnew). In this way, the wrapped model may be viewed as a \"self-tuning\" version of the unwrapped model. That is, wrapping the model simply transforms certain hyper-parameters into learned parameters.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"A corollary of the tuning-as-wrapper approach is that the evaluation of the performance of a TunedModel instance using evaluate! implies nested resampling. This approach is inspired by MLR. See also below.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"In MLJ, tuning is an iterative procedure, with an iteration parameter n, the total number of model instances to be evaluated. Accordingly, tuning can be controlled using MLJ's IteratedModel wrapper. After familiarizing oneself with the TunedModel wrapper described below, see Controlling model tuning for more on this advanced feature.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"For a more in-depth overview of tuning in MLJ, or for implementation details, see the MLJTuning documentation. For a complete list of options see the TunedModel doc-string below.","category":"page"},{"location":"tuning_models/#Tuning-a-single-hyperparameter-using-a-grid-search-(regression-example)","page":"Tuning Models","title":"Tuning a single hyperparameter using a grid search (regression example)","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"using MLJ\nX = MLJ.table(rand(100, 10));\ny = 2X.x1 - X.x2 + 0.05*rand(100);\nTree = @load DecisionTreeRegressor pkg=DecisionTree verbosity=0;\ntree = Tree()","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Let's tune min_purity_increase in the model above, using a grid-search. 
To do so we will use the simplest range object, a one-dimensional range object constructed using the range method:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"r = range(tree, :min_purity_increase, lower=0.001, upper=1.0, scale=:log);\nself_tuning_tree = TunedModel(\n model=tree,\n resampling=CV(nfolds=3),\n tuning=Grid(resolution=10),\n range=r,\n measure=rms\n);","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Incidentally, a grid is generated internally \"over the range\" by calling the iterator method with an appropriate resolution:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"iterator(r, 5)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Non-numeric hyperparameters are handled a little differently:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"selector = FeatureSelector();\nr2 = range(selector, :features, values = [[:x1,], [:x1, :x2]]);\niterator(r2)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Unbounded ranges are also permitted. See the range and iterator docstrings below for details, and the sampler docstring for generating random samples from one-dimensional ranges (used internally by the RandomSearch strategy).","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Returning to the wrapped tree model:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"mach = machine(self_tuning_tree, X, y);\nfit!(mach, verbosity=0)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"We can inspect the detailed results of the grid search with report(mach) or just retrieve the optimal model, as here:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"fitted_params(mach).best_model","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"For more detailed information, we can look at report(mach), for example:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"entry = report(mach).best_history_entry","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Predicting on new input observations using the optimal model, trained on all the data bound to mach:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Xnew = MLJ.table(rand(3, 10));\npredict(mach, Xnew)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Or predicting on some subset of the observations bound to mach:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"test = 1:3\npredict(mach, rows=test)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"For tuning using only a subset train of all observation indices, specify rows=train in the above fit! call. 
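For example, a minimal sketch (the 70/30 holdout split and the rng seed are illustrative only):\n\n## row indices for tuning and for a holdout set\ntrain, test = partition(eachindex(y), 0.7, shuffle=true, rng=1234)\nmach = machine(self_tuning_tree, X, y)\nfit!(mach, rows=train, verbosity=0)   ## search, then retrain best model using the train rows only\npredict(mach, rows=test)              ## predict with the optimal model on the held-out rows\n\n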
In that case, the above predict calls would be based on training the optimal model on all train rows.","category":"page"},{"location":"tuning_models/#A-probabilistic-classifier-example","page":"Tuning Models","title":"A probabilistic classifier example","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Tuning a classifier is not essentially different from tuning a regressor. A common gotcha however is to overlook the distinction between supervised models that make point predictions (subtypes of Deterministic) and those that make probabilistic predictions (subtypes of Probabilistic). The DecisionTreeRegressor model in the preceding illustration was deterministic, so this example will consider a probabilistic classifier:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"info(\"KNNClassifier\").prediction_type","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"X, y = @load_iris\nKNN = @load KNNClassifier verbosity=0\nknn = KNN()","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"We'll tune the hyperparameter K in the model above, using a grid-search once more:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"K_range = range(knn, :K, lower=5, upper=20);","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Since the model is probabilistic, we can choose either: (i) a probabilistic measure, such as brier_loss; or (ii) use a deterministic measure, such as misclassification_rate (which means predict_mean is called instead of predict under the hood).","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Case (i) - probabilistic measure:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"self_tuning_knn = TunedModel(\n model=knn,\n resampling = CV(nfolds=4, rng=1234),\n tuning = Grid(resolution=5),\n range = K_range,\n measure=BrierLoss()\n);\n\nmach = machine(self_tuning_knn, X, y);\nfit!(mach, verbosity=0);","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Case (ii) - deterministic measure:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"self_tuning_knn = TunedModel(\n model=knn,\n resampling = CV(nfolds=4, rng=1234),\n tuning = Grid(resolution=5),\n range = K_range,\n measure=MisclassificationRate()\n)\n\nmach = machine(self_tuning_knn, X, y);\nfit!(mach, verbosity=0);","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Let's inspect the best model and corresponding evaluation of the metric in case (ii):","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"entry = report(mach).best_history_entry","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"entry.model.K","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Recall that fitting mach also retrains the optimal model on all available data. 
The following is therefore an optimal model prediction based on all available data:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"predict(mach, rows=148:150)","category":"page"},{"location":"tuning_models/#Specifying-a-custom-measure","page":"Tuning Models","title":"Specifying a custom measure","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Users may specify a custom loss or scoring function, so long as it complies with the StatisticalMeasuresBase.jl API and implements the appropriate orientation trait (Score() or Loss()) from that package. For example, we suppose define a \"new\" scoring function custom_accuracy by","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"custom_accuracy(yhat, y) = mean(y .== yhat); # yhat - prediction, y - ground truth","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"In tuning, scores are maximised, while losses are minimised. So here we declare","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"import StatisticalMeasuresBase as SMB\nSMB.orientation(::typeof(custom_accuracy)) = SMB.Score()","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"For full details on constructing custom measures, see StatisticalMeasuresBase.jl.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"self_tuning_knn = TunedModel(\n model=knn,\n resampling = CV(nfolds=4),\n tuning = Grid(resolution=5),\n range = K_range,\n measure = [custom_accuracy, MulticlassFScore()],\n operation = predict_mode\n);\n\nmach = machine(self_tuning_knn, X, y)\nfit!(mach, verbosity=0)\nentry = report(mach).best_history_entry","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"entry.model.K","category":"page"},{"location":"tuning_models/#Tuning-multiple-nested-hyperparameters","page":"Tuning Models","title":"Tuning multiple nested hyperparameters","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"The forest model below has another model, namely a DecisionTreeRegressor, as a hyperparameter:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"tree = Tree() # defined above\nforest = EnsembleModel(model=tree)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Ranges for nested hyperparameters are specified using dot syntax. 
In this case, we will specify a goal for the total number of grid points:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"r1 = range(forest, :(model.n_subfeatures), lower=1, upper=9);\nr2 = range(forest, :bagging_fraction, lower=0.4, upper=1.0);\nself_tuning_forest = TunedModel(\n model=forest,\n tuning=Grid(goal=30),\n resampling=CV(nfolds=6),\n range=[r1, r2],\n measure=rms);\n\nX = MLJ.table(rand(100, 10));\ny = 2X.x1 - X.x2 + 0.05*rand(100);\n\nmach = machine(self_tuning_forest, X, y);\nfit!(mach, verbosity=0);","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"We can plot the grid search results:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"using Plots\nplot(mach)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"(Image: )","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Instead of specifying a goal, we can declare a global resolution, which is overridden for a particular parameter by pairing its range with the resolution desired. In the next example, the default resolution=100 is applied to the r2 field, but a resolution of 3 is applied to the r1 field. Additionally, we ask that the grid points be randomly traversed and the total number of evaluations be limited to 25.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"tuning = Grid(resolution=100, shuffle=true, rng=1234)\nself_tuning_forest = TunedModel(\n model=forest,\n tuning=tuning,\n resampling=CV(nfolds=6),\n range=[(r1, 3), r2],\n measure=rms,\n n=25\n);\nfit!(machine(self_tuning_forest, X, y), verbosity=0);","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"For more options for a grid search, see Grid below.","category":"page"},{"location":"tuning_models/#Tuning-using-a-random-search","page":"Tuning Models","title":"Tuning using a random search","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Let's attempt to tune the same hyperparameters using a RandomSearch tuning strategy. By default, bounded numeric ranges like r1 and r2 are sampled uniformly (before rounding, in the case of the integer range r1). Positive unbounded ranges are sampled using a Gamma distribution by default, and all others using a (truncated) normal distribution.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"self_tuning_forest = TunedModel(\n model=forest,\n tuning=RandomSearch(),\n resampling=CV(nfolds=6),\n range=[r1, r2],\n measure=rms,\n n=25\n);\nX = MLJ.table(rand(100, 10));\ny = 2X.x1 - X.x2 + 0.05*rand(100);\nmach = machine(self_tuning_forest, X, y);\nfit!(mach, verbosity=0)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"using Plots\nplot(mach)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"(Image: )","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"The prior distributions used for sampling each hyperparameter can be customized, as can the global fallbacks. 
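For example, one hedged sketch of such a customization, pairing the range r1 with an explicit prior while r2 keeps the default sampler (the Gamma prior and its parameters are illustrative only; consult the RandomSearch doc-string for the exact pairing conventions):\n\nusing Distributions\n\nself_tuning_forest = TunedModel(\n model=forest,\n tuning=RandomSearch(rng=1234),\n resampling=CV(nfolds=6),\n range=[(r1, Gamma(2, 1)), r2],   ## (range, distribution) pair for r1\n measure=rms,\n n=25\n);\nfit!(machine(self_tuning_forest, X, y), verbosity=0);\n\n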
See the RandomSearch doc-string below for details.","category":"page"},{"location":"tuning_models/#Tuning-using-Latin-hypercube-sampling","page":"Tuning Models","title":"Tuning using Latin hypercube sampling","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"One can also tune the hyperparameters using the LatinHypercube tuning strategy. This method uses a genetic-based optimization algorithm based on the inverse of the Audze-Eglais function, using the library LatinHypercubeSampling.jl.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"We'll work with the data X, y and ranges r1 and r2 defined above and instantiate a Latin hypercube resampling strategy:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"latin = LatinHypercube(gens=2, popsize=120)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Here gens is the number of generations to run the optimisation for and popsize is the population size in the genetic algorithm. For more on these and other LatinHypercube parameters refer to the LatinHypercubeSampling.jl documentation. Pay attention that gens and popsize are not to be confused with the iteration parameter n in the construction of a corresponding TunedModel instance, which specifies the total number of models to be evaluated, independent of the tuning strategy.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"For this illustration we'll add a third, nominal, hyper-parameter:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"r3 = range(forest, :(model.post_prune), values=[true, false]);\nself_tuning_forest = TunedModel(\n model=forest,\n tuning=latin,\n resampling=CV(nfolds=6),\n range=[r1, r2, r3],\n measure=rms,\n n=25\n);\nmach = machine(self_tuning_forest, X, y);\nfit!(mach, verbosity=0)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"using Plots\nplot(mach)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"(Image: )","category":"page"},{"location":"tuning_models/#explicit","page":"Tuning Models","title":"Comparing models of different type and nested cross-validation","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Instead of mutating hyperparameters of a fixed model, one can instead optimise over an explicit list of models, whose types are allowed to vary. 
As with other tuning strategies, evaluating the resulting TunedModel itself implies nested resampling (e.g., nested cross-validation) which we now examine in a bit more detail.","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"tree = (@load DecisionTreeClassifier pkg=DecisionTree verbosity=0)()\nknn = (@load KNNClassifier pkg=NearestNeighborModels verbosity=0)()\nmodels = [tree, knn]\nnothing # hide","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"The following model is equivalent to the best in models by using 3-fold cross-validation:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"multi_model = TunedModel(\n models=models,\n resampling=CV(nfolds=3),\n measure=log_loss,\n check_measure=false\n)\nnothing # hide","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Note that there is no need to specify a tuning strategy or range but we do specify models (plural) instead of model. Evaluating multi_model implies nested cross-validation (each model gets evaluated 2 x 3 times):","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"X, y = make_blobs()\n\ne = evaluate(multi_model, X, y, resampling=CV(nfolds=2), measure=log_loss, verbosity=6)","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"Now, for example, we can get the best model for the first fold out of the two folds:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"e.report_per_fold[1].best_model","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"And the losses in the outer loop (these still have to be matched to the best performing model):","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"e.per_fold","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"It is also possible to get the results for the nested evaluations. For example, for the first fold of the outer loop and the second model:","category":"page"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"e.report_per_fold[2].history[1]","category":"page"},{"location":"tuning_models/#Reference","page":"Tuning Models","title":"Reference","text":"","category":"section"},{"location":"tuning_models/","page":"Tuning Models","title":"Tuning Models","text":"MLJBase.range\nMLJBase.iterator\nMLJBase.sampler\nDistributions.fit(::Type{D}, ::MLJBase.NumericRange) where D<:Distributions.Distribution\nMLJTuning.TunedModel\nMLJTuning.Grid\nMLJTuning.RandomSearch\nMLJTuning.LatinHypercube","category":"page"},{"location":"tuning_models/#Base.range","page":"Tuning Models","title":"Base.range","text":"r = range(model, :hyper; values=nothing)\n\nDefine a one-dimensional NominalRange object for a field hyper of model. Note that r is not directly iterable but iterator(r) is.\n\nA nested hyperparameter is specified using dot notation. For example, :(atom.max_depth) specifies the max_depth hyperparameter of the submodel model.atom.\n\nr = range(model, :hyper; upper=nothing, lower=nothing,\n scale=nothing, values=nothing)\n\nAssuming values is not specified, define a one-dimensional NumericRange object for a Real field hyper of model. 
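For instance (an illustrative sketch only; min_samples_split is assumed to be a hyperparameter of the model being tuned):\n\nr = range(model, :min_samples_split, lower=2, upper=128, scale=:log2)\n\n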
Note that r is not directly iterable but iterator(r, n) is an iterator of length n. To generate random elements from r, instead apply rand methods to sampler(r). The supported scales are :linear, :log, :logminus, :log10, :log10minus, :log2, or a callable object.\n\nBy default, the behaviour of the constructed object depends on the type of the value of the hyperparameter :hyper at model at the time of construction. To override this behaviour (for instance if model is not available) specify a type in place of model so the behaviour is determined by the value of the specified type.\n\nA nested hyperparameter is specified using dot notation (see above).\n\nIf scale is unspecified, it is set to :linear, :log, :log10minus, or :linear, according to whether the interval (lower, upper) is bounded, right-unbounded, left-unbounded, or doubly unbounded, respectively. Note upper=Inf and lower=-Inf are allowed.\n\nIf values is specified, the other keyword arguments are ignored and a NominalRange object is returned (see above).\n\nSee also: iterator, sampler\n\n\n\n\n\n","category":"function"},{"location":"tuning_models/#MLJBase.iterator","page":"Tuning Models","title":"MLJBase.iterator","text":"iterator([rng, ], r::NominalRange, [,n])\niterator([rng, ], r::NumericRange, n)\n\nReturn an iterator (currently a vector) for a ParamRange object r. In the first case iteration is over all values stored in the range (or just the first n, if n is specified). In the second case, the iteration is over approximately n ordered values, generated as follows:\n\n(i) First, exactly n values are generated between U and L, with a spacing determined by r.scale (uniform if scale=:linear) where U and L are given by the following table:\n\nr.lower r.upper L U\nfinite finite r.lower r.upper\n-Inf finite r.upper - 2r.unit r.upper\nfinite Inf r.lower r.lower + 2r.unit\n-Inf Inf r.origin - r.unit r.origin + r.unit\n\n(ii) If a callable f is provided as scale, then a uniform spacing is always applied in (i) but f is broadcast over the results. (Unlike ordinary scales, this alters the effective range of values generated, instead of just altering the spacing.)\n\n(iii) If r is a discrete numeric range (r isa NumericRange{<:Integer}) then the values are additionally rounded, with any duplicate values removed. Otherwise all the values are used (and there are exactly n of them).\n\n(iv) Finally, if a random number generator rng is specified, then the values are returned in random order (sampling without replacement), and otherwise they are returned in numeric order, or in the order provided to the range constructor, in the case of a NominalRange.\n\n\n\n\n\n","category":"function"},{"location":"tuning_models/#Distributions.sampler","page":"Tuning Models","title":"Distributions.sampler","text":"sampler(r::NominalRange, probs::AbstractVector{<:Real})\nsampler(r::NominalRange)\nsampler(r::NumericRange{T}, d)\n\nConstruct an object s which can be used to generate random samples from a ParamRange object r (a one-dimensional range) using one of the following calls:\n\nrand(s) # for one sample\nrand(s, n) # for n samples\nrand(rng, s [, n]) # to specify an RNG\n\nThe argument probs can be any probability vector with the same length as r.values. 
The second sampler method above calls the first with a uniform probs vector.\n\nThe argument d can be either an arbitrary instance of UnivariateDistribution from the Distributions.jl package, or one of a Distributions.jl types for which fit(d, ::NumericRange) is defined. These include: Arcsine, Uniform, Biweight, Cosine, Epanechnikov, SymTriangularDist, Triweight, Normal, Gamma, InverseGaussian, Logistic, LogNormal, Cauchy, Gumbel, Laplace, and Poisson; but see the doc-string for Distributions.fit for an up-to-date list.\n\nIf d is an instance, then sampling is from a truncated form of the supplied distribution d, the truncation bounds being r.lower and r.upper (the attributes r.origin and r.unit attributes are ignored). For discrete numeric ranges (T <: Integer) the samples are rounded.\n\nIf d is a type then a suitably truncated distribution is automatically generated using Distributions.fit(d, r).\n\nImportant. Values are generated with no regard to r.scale, except in the special case r.scale is a callable object f. In that case, f is applied to all values generated by rand as described above (prior to rounding, in the case of discrete numeric ranges).\n\nExamples\n\nr = range(Char, :letter, values=collect(\"abc\"))\ns = sampler(r, [0.1, 0.2, 0.7])\nsamples = rand(s, 1000);\nStatsBase.countmap(samples)\nDict{Char,Int64} with 3 entries:\n 'a' => 107\n 'b' => 205\n 'c' => 688\n\nr = range(Int, :k, lower=2, upper=6) # numeric but discrete\ns = sampler(r, Normal)\nsamples = rand(s, 1000);\nUnicodePlots.histogram(samples)\n ┌ ┐\n[2.0, 2.5) ┤▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 119\n[2.5, 3.0) ┤ 0\n[3.0, 3.5) ┤▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 296\n[3.5, 4.0) ┤ 0\n[4.0, 4.5) ┤▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 275\n[4.5, 5.0) ┤ 0\n[5.0, 5.5) ┤▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 221\n[5.5, 6.0) ┤ 0\n[6.0, 6.5) ┤▇▇▇▇▇▇▇▇▇▇▇ 89\n └ ┘\n\n\n\n\n\n","category":"function"},{"location":"tuning_models/#StatsAPI.fit-Union{Tuple{D}, Tuple{Type{D}, NumericRange}} where D<:Distributions.Distribution","page":"Tuning Models","title":"StatsAPI.fit","text":"Distributions.fit(D, r::MLJBase.NumericRange)\n\nFit and return a distribution d of type D to the one-dimensional range r.\n\nOnly types D in the table below are supported.\n\nThe distribution d is constructed in two stages. First, a distributon d0, characterized by the conditions in the second column of the table, is fit to r. 
Then d0 is truncated between r.lower and r.upper to obtain d.\n\nDistribution type D Characterization of d0\nArcsine, Uniform, Biweight, Cosine, Epanechnikov, SymTriangularDist, Triweight minimum(d) = r.lower, maximum(d) = r.upper\nNormal, Gamma, InverseGaussian, Logistic, LogNormal mean(d) = r.origin, std(d) = r.unit\nCauchy, Gumbel, Laplace, (Normal) Dist.location(d) = r.origin, Dist.scale(d) = r.unit\nPoisson Dist.mean(d) = r.unit\n\nHere Dist = Distributions.\n\n\n\n\n\n","category":"method"},{"location":"tuning_models/#MLJTuning.TunedModel","page":"Tuning Models","title":"MLJTuning.TunedModel","text":"tuned_model = TunedModel(; model=,\n tuning=RandomSearch(),\n resampling=Holdout(),\n range=nothing,\n measure=nothing,\n n=default_n(tuning, range),\n operation=nothing,\n other_options...)\n\nConstruct a model wrapper for hyper-parameter optimization of a supervised learner, specifying the tuning strategy and model whose hyper-parameters are to be mutated.\n\ntuned_model = TunedModel(; models=,\n resampling=Holdout(),\n measure=nothing,\n n=length(models),\n operation=nothing,\n other_options...)\n\nConstruct a wrapper for multiple models, for selection of an optimal one (equivalent to specifying tuning=Explicit() and range=models above). Elements of the iterator models need not have a common type, but they must all be Deterministic or all be Probabilistic and this is not checked but inferred from the first element generated.\n\nSee below for a complete list of options.\n\nTraining\n\nCalling fit!(mach) on a machine mach=machine(tuned_model, X, y) or mach=machine(tuned_model, X, y, w) will:\n\nInstigate a search, over clones of model, with the hyperparameter mutations specified by range, for a model optimizing the specified measure, using performance evaluations carried out using the specified tuning strategy and resampling strategy. In the case models is explicitly listed, the search is instead over the models generated by the iterator models.\nFit an internal machine, based on the optimal model fitted_params(mach).best_model, wrapping the optimal model object in all the provided data X, y(, w). Calling predict(mach, Xnew) then returns predictions on Xnew of this internal machine. The final train can be suppressed by setting train_best=false.\n\nSearch space\n\nThe range objects supported depend on the tuning strategy specified. Query the strategy docstring for details. To optimize over an explicit list v of models of the same type, use strategy=Explicit() and specify model=v[1] and range=v.\n\nThe number of models searched is specified by n. If unspecified, then MLJTuning.default_n(tuning, range) is used. When n is increased and fit!(mach) called again, the old search history is re-instated and the search continues where it left off.\n\nMeasures (metrics)\n\nIf more than one measure is specified, then only the first is optimized (unless strategy is multi-objective) but the performance against every measure specified will be computed and reported in report(mach).best_performance and other relevant attributes of the generated report. Options exist to pass per-observation weights or class weights to measures; see below.\n\nImportant. If a custom measure, my_measure, is used, and the measure is a score, rather than a loss, be sure to check that MLJ.orientation(my_measure) == :score to ensure maximization of the measure, rather than minimization. 
Override an incorrect value with MLJ.orientation(::typeof(my_measure)) = :score.\n\nAccessing the fitted parameters and other training (tuning) outcomes\n\nA Plots.jl plot of performance estimates is returned by plot(mach) or heatmap(mach).\n\nOnce a tuning machine mach has been trained as above, then fitted_params(mach) has these keys/values:\n\nkey value\nbest_model optimal model instance\nbest_fitted_params learned parameters of the optimal model\n\nThe named tuple report(mach) includes these keys/values:\n\nkey value\nbest_model optimal model instance\nbest_history_entry corresponding entry in the history, including performance estimate\nbest_report report generated by fitting the optimal model to all data\nhistory tuning strategy-specific history of all evaluations\n\nplus other key/value pairs specific to the tuning strategy.\n\nComplete list of key-word options\n\nmodel: Supervised model prototype that is cloned and mutated to generate models for evaluation\nmodels: Alternatively, an iterator of MLJ models to be explicitly evaluated. These may have varying types.\ntuning=RandomSearch(): tuning strategy to be applied (eg, Grid()). See the Tuning Models section of the MLJ manual for a complete list of options.\nresampling=Holdout(): resampling strategy (eg, Holdout(), CV(), StratifiedCV()) to be applied in performance evaluations\nmeasure: measure or measures to be applied in performance evaluations; only the first used in optimization (unless the strategy is multi-objective) but all reported to the history\nweights: per-observation weights to be passed to the measure(s) in performance evaluations, where supported. Check support with supports_weights(measure).\nclass_weights: class weights to be passed to the measure(s) in performance evaluations, where supported. Check support with supports_class_weights(measure).\nrepeats=1: for generating train/test sets multiple times in resampling (\"Monte Carlo\" resampling); see evaluate! for details\noperation/operations - One of predict, predict_mean, predict_mode, predict_median, or predict_joint, or a vector of these of the same length as measure/measures. Automatically inferred if left unspecified.\nrange: range object; tuning strategy documentation describes supported types\nselection_heuristic: the rule determining how the best model is decided. According to the default heuristic, NaiveSelection(), measure (or the first element of measure) is evaluated for each resample and these per-fold measurements are aggregated. The model with the lowest (resp. highest) aggregate is chosen if the measure is a :loss (resp. a :score).\nn: number of iterations (ie, models to be evaluated); set by tuning strategy if left unspecified\ntrain_best=true: whether to train the optimal model\nacceleration=default_resource(): mode of parallelization for tuning strategies that support this\nacceleration_resampling=CPU1(): mode of parallelization for resampling\ncheck_measure=true: whether to check measure is compatible with the specified model and operation\ncache=true: whether to cache model-specific representations of user-supplied data; set to false to conserve memory. 
Speed gains likely limited to the case resampling isa Holdout.\n\n\n\n\n\n","category":"function"},{"location":"tuning_models/#MLJTuning.Grid","page":"Tuning Models","title":"MLJTuning.Grid","text":"Grid(goal=nothing, resolution=10, rng=Random.GLOBAL_RNG, shuffle=true)\n\nInstantiate a Cartesian grid-based hyperparameter tuning strategy with a specified number of grid points as goal, or using a specified default resolution in each numeric dimension.\n\nSupported ranges:\n\nA single one-dimensional range or vector of one-dimensional ranges can be specified. Specifically, in Grid search, the range field of a TunedModel instance can be:\n\nA single one-dimensional range - ie, ParamRange object - r, or pair of the form (r, res) where res specifies a resolution to override the default resolution.\nAny vector of objects of the above form\n\nTwo elements of a range vector may share the same field attribute, with the effect that their grids are combined, as in Example 3 below.\n\nParamRange objects are constructed using the range method.\n\nExample 1:\n\nrange(model, :hyper1, lower=1, origin=2, unit=1)\n\nExample 2:\n\n[(range(model, :hyper1, lower=1, upper=10), 15),\n range(model, :hyper2, lower=2, upper=4),\n range(model, :hyper3, values=[:ball, :tree])]\n\nExample 3:\n\n# a range generating the grid `[1, 2, 10, 20, 30]` for `:hyper1`:\n[range(model, :hyper1, values=[1, 2]),\n (range(model, :hyper1, lower=10, upper=30), 3)]\n\nNote: All the field values of the ParamRange objects (:hyper1, :hyper2, :hyper3 in the preceding example) must refer to field names of a single model (the model specified during TunedModel construction).\n\nAlgorithm\n\nThis is a standard grid search with the following specifics: In all cases all values of each specified NominalRange are exhausted. If goal is specified, then all resolutions are ignored, and a global resolution is applied to the NumericRange objects that maximizes the number of grid points, subject to the restriction that this not exceed goal. (This assumes no field appears twice in the range vector.) Otherwise the default resolution and any parameter-specific resolutions apply.\n\nIn all cases the models generated are shuffled using rng, unless shuffle=false.\n\nSee also TunedModel, range.\n\n\n\n\n\n","category":"type"},{"location":"tuning_models/#MLJTuning.RandomSearch","page":"Tuning Models","title":"MLJTuning.RandomSearch","text":"RandomSearch(bounded=Distributions.Uniform,\n positive_unbounded=Distributions.Gamma,\n other=Distributions.Normal,\n rng=Random.GLOBAL_RNG)\n\nInstantiate a random search tuning strategy, for searching over Cartesian hyperparameter domains, with customizable priors in each dimension.\n\nSupported ranges\n\nA single one-dimensional range or vector of one-dimensional ranges can be specified. If not paired with a prior, then one is fitted, according to fallback distribution types specified by the tuning strategy hyperparameters. 
Specifically, in RandomSearch, the range field of a TunedModel instance can be:\n\na single one-dimensional range (ParamRange object) r\na pair of the form (r, d), with r as above and where d is:\na probability vector of the same length as r.values (r a NominalRange)\nany Distributions.UnivariateDistribution instance (r a NumericRange)\none of the subtypes of Distributions.UnivariateDistribution listed in the table below, for automatic fitting using Distributions.fit(d, r), a distribution whose support always lies between r.lower and r.upper (r a NumericRange)\nany pair of the form (field, s), where field is the (possibly nested) name of a field of the model to be tuned, and s an arbitrary sampler object for that field. This means only that rand(rng, s) is defined and returns valid values for the field.\nany vector of objects of the above form\n\nA range vector may contain multiple entries for the same model field, as in range = [(:lambda, s1), (:alpha, s), (:lambda, s2)]. In that case the entry used in each iteration is random.\n\ndistribution types for fitting to ranges of this type\nArcsine, Uniform, Biweight, Cosine, Epanechnikov, SymTriangularDist, Triweight bounded\nGamma, InverseGaussian, Poisson positive (bounded or unbounded)\nNormal, Logistic, LogNormal, Cauchy, Gumbel, Laplace any\n\nParamRange objects are constructed using the range method.\n\nExamples\n\nusing Distributions\n\nrange1 = range(model, :hyper1, lower=0, upper=1)\n\nrange2 = [(range(model, :hyper1, lower=1, upper=10), Arcsine),\n range(model, :hyper2, lower=2, upper=Inf, unit=1, origin=3),\n (range(model, :hyper2, lower=2, upper=4), Normal(0, 3)),\n (range(model, :hyper3, values=[:ball, :tree]), [0.3, 0.7])]\n\n# uniform sampling of :(atom.λ) from [0, 1] without defining a NumericRange:\nstruct MySampler end\nBase.rand(rng::Random.AbstractRNG, ::MySampler) = rand(rng)\nrange3 = (:(atom.λ), MySampler())\n\nAlgorithm\n\nIn each iteration, a model is generated for evaluation by mutating the fields of a deep copy of model. The range vector is shuffled and the fields sampled according to the new order (repeated fields being mutated more than once). For a range entry of the form (field, s) the algorithm calls rand(rng, s) and mutates the field field of the model clone to have this value. For an entry of the form (r, d), s is substituted with sampler(r, d). If no d is specified, then sampling is uniform (with replacement) if r is a NominalRange, and is otherwise given by the defaults specified by the tuning strategy parameters bounded, positive_unbounded, and other, depending on the field values of the NumericRange object r.\n\nSee also TunedModel, range, sampler.\n\n\n\n\n\n","category":"type"},{"location":"tuning_models/#MLJTuning.LatinHypercube","page":"Tuning Models","title":"MLJTuning.LatinHypercube","text":"LatinHypercube(gens = 1,\n popsize = 100,\n ntour = 2,\n ptour = 0.8.,\n interSampleWeight = 1.0,\n ae_power = 2,\n periodic_ae = false,\n rng=Random.GLOBAL_RNG)\n\nInstantiate grid-based hyperparameter tuning strategy using the library LatinHypercubeSampling.jl.\n\nAn optimised Latin Hypercube sampling plan is created using a genetic based optimization algorithm based on the inverse of the Audze-Eglais function. 
The optimization is run for gens generations and creates n models for evaluation, where n is specified by a corresponding TunedModel instance, as in\n\ntuned_model = TunedModel(model=...,\n tuning=LatinHypercube(...),\n range=...,\n measures=...,\n n=...)\n\n(See TunedModel for complete options.)\n\nTo use a periodic version of the Audze-Eglais function (to reduce clustering along the boundaries) specify periodic_ae = true.\n\nSupported ranges:\n\nA single one-dimensional range or vector of one-dimensional ranges can be specified. Specifically, in LatinHypercubeSampling search, the range field of a TunedModel instance can be:\n\nA single one-dimensional range - ie, ParamRange object - r, constructed using the range method.\nAny vector of objects of the above form\n\nBoth NumericRanges and NominalRanges are supported, and hyper-parameter values are sampled on a scale specified by the range (eg, r.scale = :log).\n\n\n\n\n\n","category":"type"},{"location":"models/DummyClassifier_MLJScikitLearnInterface/#DummyClassifier_MLJScikitLearnInterface","page":"DummyClassifier","title":"DummyClassifier","text":"","category":"section"},{"location":"models/DummyClassifier_MLJScikitLearnInterface/","page":"DummyClassifier","title":"DummyClassifier","text":"DummyClassifier","category":"page"},{"location":"models/DummyClassifier_MLJScikitLearnInterface/","page":"DummyClassifier","title":"DummyClassifier","text":"A model type for constructing a dummy classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/DummyClassifier_MLJScikitLearnInterface/","page":"DummyClassifier","title":"DummyClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/DummyClassifier_MLJScikitLearnInterface/","page":"DummyClassifier","title":"DummyClassifier","text":"DummyClassifier = @load DummyClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/DummyClassifier_MLJScikitLearnInterface/","page":"DummyClassifier","title":"DummyClassifier","text":"Do model = DummyClassifier() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in DummyClassifier(strategy=...).","category":"page"},{"location":"models/DummyClassifier_MLJScikitLearnInterface/","page":"DummyClassifier","title":"DummyClassifier","text":"DummyClassifier is a classifier that makes predictions using simple rules.","category":"page"},{"location":"models/StableForestRegressor_SIRUS/#StableForestRegressor_SIRUS","page":"StableForestRegressor","title":"StableForestRegressor","text":"","category":"section"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"StableForestRegressor","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"A model type for constructing a stable forest regressor, based on SIRUS.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"StableForestRegressor = @load StableForestRegressor pkg=SIRUS","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"Do model = StableForestRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in StableForestRegressor(rng=...).","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"StableForestRegressor implements the random forest regressor with a stabilized forest structure (Bénard et al., 2021).","category":"page"},{"location":"models/StableForestRegressor_SIRUS/#Training-data","page":"StableForestRegressor","title":"Training data","text":"","category":"section"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"where","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/StableForestRegressor_SIRUS/#Hyperparameters","page":"StableForestRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"rng::AbstractRNG=default_rng(): Random number 
generator. Using a StableRNG from StableRNGs.jl is advised.\npartial_sampling::Float64=0.7: Ratio of samples to use in each subset of the data. The default should be fine for most cases.\nn_trees::Int=1000: The number of trees to use. It is advisable to use at least a thousand trees for better rule selection, and in turn better predictive performance.\nmax_depth::Int=2: The depth of the tree. A lower depth decreases model complexity and can therefore improve accuracy when the sample size is small (reduce overfitting).\nq::Int=10: Number of cutpoints to use per feature. The default value should be fine for most situations.\nmin_data_in_leaf::Int=5: Minimum number of data points per leaf.","category":"page"},{"location":"models/StableForestRegressor_SIRUS/#Fitted-parameters","page":"StableForestRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"fitresult: A StableForest object.","category":"page"},{"location":"models/StableForestRegressor_SIRUS/#Operations","page":"StableForestRegressor","title":"Operations","text":"","category":"section"},{"location":"models/StableForestRegressor_SIRUS/","page":"StableForestRegressor","title":"StableForestRegressor","text":"predict(mach, Xnew): Return a vector of predictions for each row of Xnew.","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/#ContinuousEncoder_MLJModels","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"","category":"section"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"ContinuousEncoder","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"A model type for constructing a continuous encoder, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"ContinuousEncoder = @load ContinuousEncoder pkg=MLJModels","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"Do model = ContinuousEncoder() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in ContinuousEncoder(drop_last=...).","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"Use this model to arrange all features (columns) of a table to have Continuous element scitype, by applying the following protocol to each feature ftr:","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"If ftr is already Continuous retain it.\nIf ftr is Multiclass, one-hot encode it.\nIf ftr is OrderedFactor, replace it with coerce(ftr, Continuous) (vector of floating point integers), unless ordered_factors=false is specified, in which case one-hot encode it.\nIf ftr is Count, replace it with coerce(ftr, Continuous).\nIf ftr has some other element scitype, or was not observed in fitting the encoder, drop it from the table.","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"Warning: This transformer assumes that levels(col) for any Multiclass or OrderedFactor column, col, is the same for training data and new data to be transformed.","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"To selectively one-hot-encode categorical features (without dropping columns) use OneHotEncoder instead.","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/#Training-data","page":"ContinuousEncoder","title":"Training data","text":"","category":"section"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"mach = machine(model, X)","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"where","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"X: any Tables.jl compatible table. Columns can be of mixed type but only those with element scitype Multiclass or OrderedFactor can be encoded. Check column scitypes with schema(X).","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/#Hyper-parameters","page":"ContinuousEncoder","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"drop_last=true: whether to drop the column corresponding to the final class of one-hot encoded features. 
For example, a three-class feature is spawned into three new features if drop_last=false, but two just features otherwise.\none_hot_ordered_factors=false: whether to one-hot any feature with OrderedFactor element scitype, or to instead coerce it directly to a (single) Continuous feature using the order","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/#Fitted-parameters","page":"ContinuousEncoder","title":"Fitted parameters","text":"","category":"section"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"features_to_keep: names of features that will not be dropped from the table\none_hot_encoder: the OneHotEncoder model instance for handling the one-hot encoding\none_hot_encoder_fitresult: the fitted parameters of the OneHotEncoder model","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/#Report","page":"ContinuousEncoder","title":"Report","text":"","category":"section"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"features_to_keep: names of input features that will not be dropped from the table\nnew_features: names of all output features","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/#Example","page":"ContinuousEncoder","title":"Example","text":"","category":"section"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"X = (name=categorical([\"Danesh\", \"Lee\", \"Mary\", \"John\"]),\n grade=categorical([\"A\", \"B\", \"A\", \"C\"], ordered=true),\n height=[1.85, 1.67, 1.5, 1.67],\n n_devices=[3, 2, 4, 3],\n comments=[\"the force\", \"be\", \"with you\", \"too\"])\n\njulia> schema(X)\n┌───────────┬──────────────────┐\n│ names │ scitypes │\n├───────────┼──────────────────┤\n│ name │ Multiclass{4} │\n│ grade │ OrderedFactor{3} │\n│ height │ Continuous │\n│ n_devices │ Count │\n│ comments │ Textual │\n└───────────┴──────────────────┘\n\nencoder = ContinuousEncoder(drop_last=true)\nmach = fit!(machine(encoder, X))\nW = transform(mach, X)\n\njulia> schema(W)\n┌──────────────┬────────────┐\n│ names │ scitypes │\n├──────────────┼────────────┤\n│ name__Danesh │ Continuous │\n│ name__John │ Continuous │\n│ name__Lee │ Continuous │\n│ grade │ Continuous │\n│ height │ Continuous │\n│ n_devices │ Continuous │\n└──────────────┴────────────┘\n\njulia> setdiff(schema(X).names, report(mach).features_to_keep) ## dropped features\n1-element Vector{Symbol}:\n :comments\n","category":"page"},{"location":"models/ContinuousEncoder_MLJModels/","page":"ContinuousEncoder","title":"ContinuousEncoder","text":"See also OneHotEncoder","category":"page"},{"location":"models/SVC_LIBSVM/#SVC_LIBSVM","page":"SVC","title":"SVC","text":"","category":"section"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"SVC","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"A model type for constructing a C-support vector classifier, based on LIBSVM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"SVC = @load SVC 
pkg=LIBSVM","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"Do model = SVC() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SVC(kernel=...).","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"This model predicts actual class labels. To predict probabilities, use instead ProbabilisticSVC.","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): \"LIBSVM: a library for support vector machines.\" ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf. ","category":"page"},{"location":"models/SVC_LIBSVM/#Training-data","page":"SVC","title":"Training data","text":"","category":"section"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"In MLJ or MLJBase, bind an instance model to data with one of:","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"mach = machine(model, X, y)\nmach = machine(model, X, y, w)","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"where","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)\ny: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)\nw: a dictionary of class weights, keyed on levels(y).","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/SVC_LIBSVM/#Hyper-parameters","page":"SVC","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see \"Examples\" below).\nLIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2\nLIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree\nLIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))\nLIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)\nHere gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91\ngamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. 
Actual value used appears in the report (see below).\ncoef0 = 0.0: kernel parameter (see above)\ndegree::Int32 = Int32(3): degree in polynomial kernel (see above)\ncost=1.0 (range (0, Inf)): the parameter denoted C in the cited reference; for greater regularization, decrease cost\ncachesize=200.0 cache memory size in MB\ntolerance=0.001: tolerance for the stopping criterion\nshrinking=true: whether to use shrinking heuristics","category":"page"},{"location":"models/SVC_LIBSVM/#Operations","page":"SVC","title":"Operations","text":"","category":"section"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/SVC_LIBSVM/#Fitted-parameters","page":"SVC","title":"Fitted parameters","text":"","category":"section"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"libsvm_model: the trained model object created by the LIBSVM.jl package\nencoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation","category":"page"},{"location":"models/SVC_LIBSVM/#Report","page":"SVC","title":"Report","text":"","category":"section"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"The fields of report(mach) are:","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"gamma: actual value of the kernel parameter gamma used in training","category":"page"},{"location":"models/SVC_LIBSVM/#Examples","page":"SVC","title":"Examples","text":"","category":"section"},{"location":"models/SVC_LIBSVM/#Using-a-built-in-kernel","page":"SVC","title":"Using a built-in kernel","text":"","category":"section"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"using MLJ\nimport LIBSVM\n\nSVC = @load SVC pkg=LIBSVM ## model type\nmodel = SVC(kernel=LIBSVM.Kernel.Polynomial) ## instance\n\nX, y = @load_iris ## table, vector\nmach = machine(model, X, y) |> fit!\n\nXnew = (sepal_length = [6.4, 7.2, 7.4],\n sepal_width = [2.8, 3.0, 2.8],\n petal_length = [5.6, 5.8, 6.1],\n petal_width = [2.1, 1.6, 1.9],)\n\njulia> yhat = predict(mach, Xnew)\n3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:\n \"virginica\"\n \"virginica\"\n \"virginica\"","category":"page"},{"location":"models/SVC_LIBSVM/#User-defined-kernels","page":"SVC","title":"User-defined kernels","text":"","category":"section"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`\nmodel = SVC(kernel=k)\nmach = machine(model, X, y) |> fit!\n\njulia> yhat = predict(mach, Xnew)\n3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:\n \"virginica\"\n \"virginica\"\n \"virginica\"","category":"page"},{"location":"models/SVC_LIBSVM/#Incorporating-class-weights","page":"SVC","title":"Incorporating class weights","text":"","category":"section"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"In either scenario above, we can do:","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"weights = Dict(\"virginica\" => 1, \"versicolor\" => 20, \"setosa\" => 1)\nmach = machine(model, X, y, weights) |> fit!\n\njulia> yhat = predict(mach, Xnew)\n3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:\n 
\"versicolor\"\n \"versicolor\"\n \"versicolor\"","category":"page"},{"location":"models/SVC_LIBSVM/","page":"SVC","title":"SVC","text":"See also the classifiers ProbabilisticSVC, NuSVC and LinearSVC. And see LIVSVM.jl and the original C implementation documentation.","category":"page"},{"location":"modifying_behavior/#Modifying-Behavior","page":"Modifying Behavior","title":"Modifying Behavior","text":"","category":"section"},{"location":"modifying_behavior/","page":"Modifying Behavior","title":"Modifying Behavior","text":"To modify behavior of MLJ you will need to clone the relevant component package (e.g., MLJBase.jl) - or a fork thereof - and modify your local julia environment to use your local clone in place of the official release. For example, you might proceed something like this:","category":"page"},{"location":"modifying_behavior/","page":"Modifying Behavior","title":"Modifying Behavior","text":"using Pkg\nPkg.activate(\"my_MLJ_enf\", shared=true)\nPkg.develop(\"path/to/my/local/MLJBase\")","category":"page"},{"location":"modifying_behavior/","page":"Modifying Behavior","title":"Modifying Behavior","text":"To test your local clone, do","category":"page"},{"location":"modifying_behavior/","page":"Modifying Behavior","title":"Modifying Behavior","text":"Pkg.test(\"MLJBase\")","category":"page"},{"location":"modifying_behavior/","page":"Modifying Behavior","title":"Modifying Behavior","text":"For more on package management, see here.","category":"page"},{"location":"models/INNEDetector_OutlierDetectionPython/#INNEDetector_OutlierDetectionPython","page":"INNEDetector","title":"INNEDetector","text":"","category":"section"},{"location":"models/INNEDetector_OutlierDetectionPython/","page":"INNEDetector","title":"INNEDetector","text":"INNEDetector(n_estimators=200,\n max_samples=\"auto\",\n random_state=None)","category":"page"},{"location":"models/INNEDetector_OutlierDetectionPython/","page":"INNEDetector","title":"INNEDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.inne","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/#COFDetector_OutlierDetectionNeighbors","page":"COFDetector","title":"COFDetector","text":"","category":"section"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"COFDetector(k = 5,\n metric = Euclidean(),\n algorithm = :kdtree,\n leafsize = 10,\n reorder = true,\n parallel = false)","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"Local outlier density based on chaining distance between graphs of neighbors, as described in [1].","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/#Parameters","page":"COFDetector","title":"Parameters","text":"","category":"section"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"k::Integer","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"Number of neighbors (must be greater than 0).","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"metric::Metric","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"This is one of the Metric types defined in the Distances.jl package. 
It is possible to define your own metrics by creating new types that are subtypes of Metric.","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"algorithm::Symbol","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"static::Union{Bool, Symbol}","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"leafsize::Int","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"Determines at what number of points to stop splitting the tree further. There is a trade-off between traversing the tree and having to evaluate the metric function for increasing number of points.","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"reorder::Bool","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"While building the tree this will put points close in distance close in memory since this helps with cache locality. In this case, a copy of the original data will be made so that the original data is left unmodified. This can have a significant impact on performance and is by default set to true.","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"parallel::Bool","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. 
Note: fit is not parallel.","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/#Examples","page":"COFDetector","title":"Examples","text":"","category":"section"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"using OutlierDetection: COFDetector, fit, transform\ndetector = COFDetector()\nX = rand(10, 100)\nmodel, result = fit(detector, X; verbosity=0)\ntest_scores = transform(detector, model, X)","category":"page"},{"location":"models/COFDetector_OutlierDetectionNeighbors/#References","page":"COFDetector","title":"References","text":"","category":"section"},{"location":"models/COFDetector_OutlierDetectionNeighbors/","page":"COFDetector","title":"COFDetector","text":"[1] Tang, Jian; Chen, Zhixiang; Fu, Ada Wai-Chee; Cheung, David Wai-Lok (2002): Enhancing Effectiveness of Outlier Detections for Low Density Patterns.","category":"page"},{"location":"models/SMOTEN_Imbalance/#SMOTEN_Imbalance","page":"SMOTEN","title":"SMOTEN","text":"","category":"section"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"Initiate a SMOTEN model with the given hyper-parameters.","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"SMOTEN","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"A model type for constructing a smoten, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"SMOTEN = @load SMOTEN pkg=Imbalance","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"Do model = SMOTEN() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SMOTEN(k=...).","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"SMOTEN implements the SMOTEN algorithm to correct for class imbalance as in N. V. Chawla, K. W. Bowyer, L. O.Hall, W. P. 
Kegelmeyer, “SMOTEN: synthetic minority over-sampling technique,” Journal of artificial intelligence research, 321-357, 2002.","category":"page"},{"location":"models/SMOTEN_Imbalance/#Training-data","page":"SMOTEN","title":"Training data","text":"","category":"section"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"In MLJ or MLJBase, wrap the model in a machine by","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"mach = machine(model)","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"Likewise, there is no need to fit!(mach).","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"For default values of the hyper-parameters, model can be constructed by","category":"page"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"model = SMOTEN()","category":"page"},{"location":"models/SMOTEN_Imbalance/#Hyperparameters","page":"SMOTEN","title":"Hyperparameters","text":"","category":"section"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"k=5: Number of nearest neighbors to consider in the SMOTEN algorithm. Should be within the range [1, n - 1], where n is the number of observations; otherwise set to the nearest of these two values.\nratios=1.0: A parameter that controls the amount of oversampling to be done for each class\nCan be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class\nCan be a dictionary mapping each class label to the float ratio for that class\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister`.","category":"page"},{"location":"models/SMOTEN_Imbalance/#Transform-Inputs","page":"SMOTEN","title":"Transform Inputs","text":"","category":"section"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"X: A matrix of integers or a table with element scitypes that subtype Finite. That is, for table inputs each column should have either OrderedFactor or Multiclass as the element scitype.\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/SMOTEN_Imbalance/#Transform-Outputs","page":"SMOTEN","title":"Transform Outputs","text":"","category":"section"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"Xover: A matrix or table that includes original data and the new observations due to oversampling. 
depending on whether the input X is a matrix or table respectively\nyover: An abstract vector of labels corresponding to Xover","category":"page"},{"location":"models/SMOTEN_Imbalance/#Operations","page":"SMOTEN","title":"Operations","text":"","category":"section"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"transform(mach, X, y): resample the data X and y using SMOTEN, returning both the new and original observations","category":"page"},{"location":"models/SMOTEN_Imbalance/#Example","page":"SMOTEN","title":"Example","text":"","category":"section"},{"location":"models/SMOTEN_Imbalance/","page":"SMOTEN","title":"SMOTEN","text":"using MLJ\nusing ScientificTypes\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows = 100\nnum_continuous_feats = 0\n## want two categorical features with three and two possible values respectively\nnum_vals_per_category = [3, 2]\n\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n class_probs, num_vals_per_category, rng=42) \njulia> Imbalance.checkbalance(y)\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) \n\njulia> ScientificTypes.schema(X).scitypes\n(Count, Count)\n\n## coerce to a finite scitype (multiclass or ordered factor)\nX = coerce(X, autotype(X, :few_to_finite))\n\n## load SMOTEN\nSMOTEN = @load SMOTEN pkg=Imbalance\n\n## wrap the model in a machine\noversampler = SMOTEN(k=5, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)\nmach = machine(oversampler)\n\n## provide the data to transform (there is nothing to fit)\nXover, yover = transform(mach, X, y)\n\njulia> Imbalance.checkbalance(yover)\n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) ","category":"page"},{"location":"models/NeuralNetworkClassifier_BetaML/#NeuralNetworkClassifier_BetaML","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_BetaML/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"mutable struct NeuralNetworkClassifier <: MLJModelInterface.Probabilistic","category":"page"},{"location":"models/NeuralNetworkClassifier_BetaML/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"A simple but flexible Feedforward Neural Network, from the Beta Machine Learning Toolkit (BetaML) for classification problems.","category":"page"},{"location":"models/NeuralNetworkClassifier_BetaML/#Parameters:","page":"NeuralNetworkClassifier","title":"Parameters:","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_BetaML/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"layers: Array of layer objects [def: nothing, i.e. basic network]. See subtypes(BetaML.AbstractLayer) for supported layers. The last \"softmax\" layer is automatically added.\nloss: Loss (cost) function [def: BetaML.crossentropy]. Should always assume y and ŷ as matrices.\nwarning: Warning\nIf you change the parameter loss, you need to either provide its derivative on the parameter dloss or use autodiff with dloss=nothing.\ndloss: Derivative of the loss function [def: BetaML.dcrossentropy, i.e. the derivative of the cross-entropy]. 
Use nothing for autodiff.\nepochs: Number of epochs, i.e. passages trough the whole training sample [def: 200]\nbatch_size: Size of each individual batch [def: 16]\nopt_alg: The optimisation algorithm to update the gradient at each batch [def: BetaML.ADAM()]. See subtypes(BetaML.OptimisationAlgorithm) for supported optimizers\nshuffle: Whether to randomly shuffle the data at each iteration (epoch) [def: true]\ndescr: An optional title and/or description for this model\ncb: A call back function to provide information during training [def: BetaML.fitting_info]\ncategories: The categories to represent as columns. [def: nothing, i.e. unique training values].\nhandle_unknown: How to handle categories not seens in training or not present in the provided categories array? \"error\" (default) rises an error, \"infrequent\" adds a specific column for these categories.\nother_categories_name: Which value during prediction to assign to this \"other\" category (i.e. categories not seen on training or not present in the provided categories array? [def: nothing, i.e. typemax(Int64) for integer vectors and \"other\" for other types]. This setting is active only if handle_unknown=\"infrequent\" and in that case it MUST be specified if Y is neither integer or strings\nrng: Random Number Generator [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/NeuralNetworkClassifier_BetaML/#Notes:","page":"NeuralNetworkClassifier","title":"Notes:","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_BetaML/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"data must be numerical\nthe label should be a n-records by n-dimensions matrix (e.g. a one-hot-encoded data for classification), where the output columns should be interpreted as the probabilities for each categories.","category":"page"},{"location":"models/NeuralNetworkClassifier_BetaML/#Example:","page":"NeuralNetworkClassifier","title":"Example:","text":"","category":"section"},{"location":"models/NeuralNetworkClassifier_BetaML/","page":"NeuralNetworkClassifier","title":"NeuralNetworkClassifier","text":"julia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load NeuralNetworkClassifier pkg = \"BetaML\" verbosity=0\nBetaML.Nn.NeuralNetworkClassifier\n\njulia> layers = [BetaML.DenseLayer(4,8,f=BetaML.relu),BetaML.DenseLayer(8,8,f=BetaML.relu),BetaML.DenseLayer(8,3,f=BetaML.relu),BetaML.VectorFunctionLayer(3,f=BetaML.softmax)];\n\njulia> model = modelType(layers=layers,opt_alg=BetaML.ADAM())\nNeuralNetworkClassifier(\n layers = BetaML.Nn.AbstractLayer[BetaML.Nn.DenseLayer([-0.376173352338049 0.7029289511758696 -0.5589563304592478 -0.21043274001651874; 0.044758889527899415 0.6687689636685921 0.4584331114653877 0.6820506583840453; … ; -0.26546358457167507 -0.28469736227283804 -0.164225549922154 -0.516785639164486; -0.5146043550684141 -0.0699113265130964 0.14959906603941908 -0.053706860039406834], [0.7003943613125758, -0.23990840466587576, -0.23823126271387746, 0.4018101580410387, 0.2274483050356888, -0.564975060667734, 0.1732063297031089, 0.11880299829896945], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([-0.029467850439546583 0.4074661266592745 … 0.36775675246760053 -0.595524555448422; 0.42455597698371306 -0.2458082732997091 … -0.3324220683462514 0.44439454998610595; … ; -0.2890883863364267 -0.10109249362508033 … -0.0602680568207582 0.18177278845097555; -0.03432587226449335 -0.4301192922760063 … 0.5646018168286626 0.47269177680892693], [0.13777442835428688, 
0.5473306726675433, 0.3781939472904011, 0.24021813428130567, -0.0714779477402877, -0.020386373530818958, 0.5465466618404464, -0.40339790713616525], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.DenseLayer([0.6565120540082393 0.7139211611842745 … 0.07809812467915389 -0.49346311403373844; -0.4544472987041656 0.6502667641568863 … 0.43634608676548214 0.7213049952968921; 0.41212264783075303 -0.21993289366360613 … 0.25365007887755064 -0.5664469566269569], [-0.6911986792747682, -0.2149343209329364, -0.6347727539063817], BetaML.Utils.relu, BetaML.Utils.drelu), BetaML.Nn.VectorFunctionLayer{0}(fill(NaN), 3, 3, BetaML.Utils.softmax, BetaML.Utils.dsoftmax, nothing)], \n loss = BetaML.Utils.crossentropy, \n dloss = BetaML.Utils.dcrossentropy, \n epochs = 100, \n batch_size = 32, \n opt_alg = BetaML.Nn.ADAM(BetaML.Nn.var\"#90#93\"(), 1.0, 0.9, 0.999, 1.0e-8, BetaML.Nn.Learnable[], BetaML.Nn.Learnable[]), \n shuffle = true, \n descr = \"\", \n cb = BetaML.Nn.fitting_info, \n categories = nothing, \n handle_unknown = \"error\", \n other_categories_name = nothing, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n\njulia> classes_est = predict(mach, X)\n150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt8, Float64}:\n UnivariateFinite{Multiclass{3}}(setosa=>0.575, versicolor=>0.213, virginica=>0.213)\n UnivariateFinite{Multiclass{3}}(setosa=>0.573, versicolor=>0.213, virginica=>0.213)\n ⋮\n UnivariateFinite{Multiclass{3}}(setosa=>0.236, versicolor=>0.236, virginica=>0.529)\n UnivariateFinite{Multiclass{3}}(setosa=>0.254, versicolor=>0.254, virginica=>0.492)","category":"page"},{"location":"models/GradientBoostingRegressor_MLJScikitLearnInterface/#GradientBoostingRegressor_MLJScikitLearnInterface","page":"GradientBoostingRegressor","title":"GradientBoostingRegressor","text":"","category":"section"},{"location":"models/GradientBoostingRegressor_MLJScikitLearnInterface/","page":"GradientBoostingRegressor","title":"GradientBoostingRegressor","text":"GradientBoostingRegressor","category":"page"},{"location":"models/GradientBoostingRegressor_MLJScikitLearnInterface/","page":"GradientBoostingRegressor","title":"GradientBoostingRegressor","text":"A model type for constructing a gradient boosting ensemble regression, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/GradientBoostingRegressor_MLJScikitLearnInterface/","page":"GradientBoostingRegressor","title":"GradientBoostingRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/GradientBoostingRegressor_MLJScikitLearnInterface/","page":"GradientBoostingRegressor","title":"GradientBoostingRegressor","text":"GradientBoostingRegressor = @load GradientBoostingRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/GradientBoostingRegressor_MLJScikitLearnInterface/","page":"GradientBoostingRegressor","title":"GradientBoostingRegressor","text":"Do model = GradientBoostingRegressor() to construct an instance with default hyper-parameters. 
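A minimal usage sketch, assuming MLJScikitLearnInterface and its scikit-learn dependency are installed (the data below is synthetic, generated with MLJ's make_regression), might look like\nusing MLJ\nGradientBoostingRegressor = @load GradientBoostingRegressor pkg=MLJScikitLearnInterface\nmodel = GradientBoostingRegressor()\nX, y = make_regression(100, 4) ## a table of features and a continuous target\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X) ## point predictions\n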
Provide keyword arguments to override hyper-parameter defaults, as in GradientBoostingRegressor(loss=...).","category":"page"},{"location":"models/GradientBoostingRegressor_MLJScikitLearnInterface/","page":"GradientBoostingRegressor","title":"GradientBoostingRegressor","text":"This estimator builds an additive model in a forward stage-wise fashion; it allows for the optimization of arbitrary differentiable loss functions. In each stage a regression tree is fit on the negative gradient of the given loss function.","category":"page"},{"location":"models/GradientBoostingRegressor_MLJScikitLearnInterface/","page":"GradientBoostingRegressor","title":"GradientBoostingRegressor","text":"HistGradientBoostingRegressor is a much faster variant of this algorithm for intermediate datasets (n_samples >= 10_000).","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/#BayesianLDA_MultivariateStats","page":"BayesianLDA","title":"BayesianLDA","text":"","category":"section"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"BayesianLDA","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"A model type for constructing a Bayesian LDA model, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"BayesianLDA = @load BayesianLDA pkg=MultivariateStats","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"Do model = BayesianLDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BayesianLDA(method=...).","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"The Bayesian multiclass LDA algorithm learns a projection matrix as described in ordinary LDA. Predicted class posterior probability distributions are derived by applying Bayes' rule with a multivariate Gaussian class-conditional distribution. A prior class distribution can be specified by the user or inferred from training data class frequency.","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"See also the package documentation. 
For more information about the algorithm, see Li, Zhu and Ogihara (2006): Using Discriminant Analysis for Multi-class Classification: An Experimental Investigation.","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/#Training-data","page":"BayesianLDA","title":"Training data","text":"","category":"section"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"Here:","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/#Hyper-parameters","page":"BayesianLDA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"method::Symbol=:gevd: choice of solver, one of :gevd or :whiten methods.\ncov_w::StatsBase.SimpleCovariance(): An estimator for the within-class covariance (used in computing the within-class scatter matrix, Sw). Any robust estimator from CovarianceEstimation.jl can be used.\ncov_b::StatsBase.SimpleCovariance(): The same as cov_w but for the between-class covariance (used in computing the between-class scatter matrix, Sb).\noutdim::Int=0: The output dimension, i.e., dimension of the transformed space, automatically set to min(indim, nclasses-1) if equal to 0.\nregcoef::Float64=1e-6: The regularization coefficient. A positive value regcoef*eigmax(Sw) where Sw is the within-class scatter matrix, is added to the diagonal of Sw to improve numerical stability. This can be useful if using the standard covariance estimator.\npriors::Union{Nothing, UnivariateFinite{<:Any, <:Any, <:Any, <:Real}, Dict{<:Any, <:Real}} = nothing: For use in prediction with Bayes rule. If priors = nothing then priors are estimated from the class proportions in the training data. Otherwise it requires a Dict or UnivariateFinite object specifying the classes with non-zero probabilities in the training target.","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/#Operations","page":"BayesianLDA","title":"Operations","text":"","category":"section"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.\npredict(mach, Xnew): Return predictions of the target given features Xnew, which should have the same scitype as X above. 
Predictions are probabilistic but uncalibrated.\npredict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/#Fitted-parameters","page":"BayesianLDA","title":"Fitted parameters","text":"","category":"section"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"classes: The classes seen during model fitting.\nprojection_matrix: The learned projection matrix, of size (indim, outdim), where indim and outdim are the input and output dimensions respectively (See Report section below).\npriors: The class priors for classification. As inferred from training target y, if not user-specified. A UnivariateFinite object with levels consistent with levels(y).","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/#Report","page":"BayesianLDA","title":"Report","text":"","category":"section"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"The fields of report(mach) are:","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"indim: The dimension of the input space i.e the number of training features.\noutdim: The dimension of the transformed space the model is projected to.\nmean: The mean of the untransformed training data. A vector of length indim.\nnclasses: The number of classes directly observed in the training data (which can be less than the total number of classes in the class pool).\nclass_means: The class-specific means of the training data. A matrix of size (indim, nclasses) with the ith column being the class-mean of the ith class in classes (See fitted params section above).\nclass_weights: The weights (class counts) of each class. A vector of length nclasses with the ith element being the class weight of the ith class in classes. 
(See fitted params section above.)\nSb: The between class scatter matrix.\nSw: The within class scatter matrix.","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/#Examples","page":"BayesianLDA","title":"Examples","text":"","category":"section"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"using MLJ\n\nBayesianLDA = @load BayesianLDA pkg=MultivariateStats\n\nX, y = @load_iris ## a table and a vector\n\nmodel = BayesianLDA()\nmach = machine(model, X, y) |> fit!\n\nXproj = transform(mach, X)\ny_hat = predict(mach, X)\nlabels = predict_mode(mach, X)","category":"page"},{"location":"models/BayesianLDA_MultivariateStats/","page":"BayesianLDA","title":"BayesianLDA","text":"See also LDA, SubspaceLDA, BayesianSubspaceLDA","category":"page"},{"location":"models/GaussianMixtureClusterer_BetaML/#GaussianMixtureClusterer_BetaML","page":"GaussianMixtureClusterer","title":"GaussianMixtureClusterer","text":"","category":"section"},{"location":"models/GaussianMixtureClusterer_BetaML/","page":"GaussianMixtureClusterer","title":"GaussianMixtureClusterer","text":"mutable struct GaussianMixtureClusterer <: MLJModelInterface.Unsupervised","category":"page"},{"location":"models/GaussianMixtureClusterer_BetaML/","page":"GaussianMixtureClusterer","title":"GaussianMixtureClusterer","text":"An Expectation-Maximisation clustering algorithm with customisable mixtures, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/GaussianMixtureClusterer_BetaML/#Hyperparameters:","page":"GaussianMixtureClusterer","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/GaussianMixtureClusterer_BetaML/","page":"GaussianMixtureClusterer","title":"GaussianMixtureClusterer","text":"n_classes::Int64: Number of mixtures (latent classes) to consider [def: 3]\ninitial_probmixtures::AbstractVector{Float64}: Initial probabilities of the categorical distribution (n_classes x 1) [default: []]\nmixtures::Union{Type, Vector{<:BetaML.GMM.AbstractMixture}}: An array (of length n_classes) of the mixtures to employ (see the ?GMM module). Each mixture object can be provided with or without its parameters (e.g. mean and variance for the gaussian ones). Fully qualified mixtures are useful only if the initialisation_strategy parameter is set to \"given\". This parameter can also be given simply in terms of a type. In this case it is automatically extended to a vector of n_classes mixtures of the specified type. Note that mixing of different mixture types is not currently supported. [def: [DiagonalGaussian() for i in 1:n_classes]]\ntol::Float64: Tolerance to stop the algorithm [default: 10^(-6)]\nminimum_variance::Float64: Minimum variance for the mixtures [default: 0.05]\nminimum_covariance::Float64: Minimum covariance for the mixtures with full covariance matrix [default: 0]. This should be set differently than minimum_variance (see notes).\ninitialisation_strategy::String: The computation method of the vector of the initial mixtures. One of the following:\n\"grid\": using a grid approach\n\"given\": using the mixture provided in the fully qualified mixtures parameter\n\"kmeans\": use k-means first (itself initialised with a \"grid\" strategy) to set the initial mixture centers [default]\nNote that currently \"random\" and \"shuffle\" initialisations are not supported in gmm-based algorithms.\nmaximum_iterations::Int64: Maximum number of iterations [def: typemax(Int64), i.e. 
∞]\nrng::Random.AbstractRNG: Random Number Generator [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/GaussianMixtureClusterer_BetaML/#Example:","page":"GaussianMixtureClusterer","title":"Example:","text":"","category":"section"},{"location":"models/GaussianMixtureClusterer_BetaML/","page":"GaussianMixtureClusterer","title":"GaussianMixtureClusterer","text":"\njulia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load GaussianMixtureClusterer pkg = \"BetaML\" verbosity=0\nBetaML.GMM.GaussianMixtureClusterer\n\njulia> model = modelType()\nGaussianMixtureClusterer(\n n_classes = 3, \n initial_probmixtures = Float64[], \n mixtures = BetaML.GMM.DiagonalGaussian{Float64}[BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing)], \n tol = 1.0e-6, \n minimum_variance = 0.05, \n minimum_covariance = 0.0, \n initialisation_strategy = \"kmeans\", \n maximum_iterations = 9223372036854775807, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X);\n\njulia> fit!(mach);\n[ Info: Training machine(GaussianMixtureClusterer(n_classes = 3, …), …).\nIter. 1: Var. of the post 10.800150114964184 Log-likelihood -650.0186451891216\n\njulia> classes_est = predict(mach, X)\n150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, Int64, UInt32, Float64}:\n UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>4.17e-15, 3=>2.1900000000000003e-31)\n UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>1.25e-13, 3=>5.87e-31)\n UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>4.5e-15, 3=>1.55e-32)\n UnivariateFinite{Multiclass{3}}(1=>1.0, 2=>6.93e-14, 3=>3.37e-31)\n ⋮\n UnivariateFinite{Multiclass{3}}(1=>5.39e-25, 2=>0.0167, 3=>0.983)\n UnivariateFinite{Multiclass{3}}(1=>7.5e-29, 2=>0.000106, 3=>1.0)\n UnivariateFinite{Multiclass{3}}(1=>1.6e-20, 2=>0.594, 3=>0.406)","category":"page"},{"location":"models/RandomForestClassifier_BetaML/#RandomForestClassifier_BetaML","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"","category":"section"},{"location":"models/RandomForestClassifier_BetaML/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"mutable struct RandomForestClassifier <: MLJModelInterface.Probabilistic","category":"page"},{"location":"models/RandomForestClassifier_BetaML/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"A simple Random Forest model for classification with support for Missing data, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/RandomForestClassifier_BetaML/#Hyperparameters:","page":"RandomForestClassifier","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/RandomForestClassifier_BetaML/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"n_trees::Int64\nmax_depth::Int64: The maximum depth the tree is allowed to reach. When this is reached the node is forced to become a leaf [def: 0, i.e. no limits]\nmin_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]\nmin_records::Int64: The minimum number of records a node must holds to consider for a partition of it [def: 2]\nmax_features::Int64: The maximum number of (random) features to consider at each partitioning [def: 0, i.e. square root of the data dimensions]\nsplitting_criterion::Function: This is the name of the function to be used to compute the information gain of a specific partition. 
This is done by measuring the difference between the \"impurity\" of the labels of the parent node and those of the two child nodes, weighted by the respective number of items. [def: gini]. Either gini, entropy or a custom function. It can also be an anonymous function.\nβ::Float64: Parameter that regulates the weights of the scoring of each tree, to be (optionally) used in prediction based on the error of the individual trees computed on the records on which trees have not been trained. Higher values favour \"better\" trees, but too high values will cause overfitting [def: 0, i.e. uniform weights]\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]","category":"page"},{"location":"models/RandomForestClassifier_BetaML/#Example-:","page":"RandomForestClassifier","title":"Example :","text":"","category":"section"},{"location":"models/RandomForestClassifier_BetaML/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"julia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load RandomForestClassifier pkg = \"BetaML\" verbosity=0\nBetaML.Trees.RandomForestClassifier\n\njulia> model = modelType()\nRandomForestClassifier(\n n_trees = 30, \n max_depth = 0, \n min_gain = 0.0, \n min_records = 2, \n max_features = 0, \n splitting_criterion = BetaML.Utils.gini, \n β = 0.0, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n[ Info: Training machine(RandomForestClassifier(n_trees = 30, …), …).\n\njulia> cat_est = predict(mach, X)\n150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:\n UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)\n UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)\n ⋮\n UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)\n UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0667, virginica=>0.933)","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/#DecisionTreeClassifier_DecisionTree","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"DecisionTreeClassifier","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"A model type for constructing a CART decision tree classifier, based on DecisionTree.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"DecisionTreeClassifier = @load DecisionTreeClassifier pkg=DecisionTree","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"Do model = DecisionTreeClassifier() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in DecisionTreeClassifier(max_depth=...).","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"DecisionTreeClassifier implements the CART algorithm, originally published in Breiman, Leo; Friedman, J. H.; Olshen, R. A.; Stone, C. J. (1984): \"Classification and regression trees\". Monterey, CA: Wadsworth & Brooks/Cole Advanced Books & Software..","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/#Training-data","page":"DecisionTreeClassifier","title":"Training data","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"where","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/#Hyperparameters","page":"DecisionTreeClassifier","title":"Hyperparameters","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"max_depth=-1: max depth of the decision tree (-1=any)\nmin_samples_leaf=1: max number of samples each leaf needs to have\nmin_samples_split=2: min number of samples needed for a split\nmin_purity_increase=0: min purity needed for a split\nn_subfeatures=0: number of features to select at random (0 for all)\npost_prune=false: set to true for post-fit pruning\nmerge_purity_threshold=1.0: (post-pruning) merge leaves having combined purity >= merge_purity_threshold\ndisplay_depth=5: max depth to show when displaying the tree\nfeature_importance: method to use for computing feature importances. One of (:impurity, :split)\nrng=Random.GLOBAL_RNG: random number generator or seed","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/#Operations","page":"DecisionTreeClassifier","title":"Operations","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. 
Predictions are probabilistic, but uncalibrated.\npredict_mode(mach, Xnew): instead return the mode of each prediction above.","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/#Fitted-parameters","page":"DecisionTreeClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"raw_tree: the raw Node, Leaf or Root object returned by the core DecisionTree.jl algorithm\ntree: a visualizable, wrapped version of raw_tree implementing the AbstractTrees.jl interface; see \"Examples\" below\nencoding: dictionary of target classes keyed on integers used internally by DecisionTree.jl\nfeatures: the names of the features encountered in training, in an order consistent with the output of print_tree (see below)","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/#Report","page":"DecisionTreeClassifier","title":"Report","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"The fields of report(mach) are:","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"classes_seen: list of target classes actually observed in training\nprint_tree: alternative method to print the fitted tree, with single argument the tree depth; interpretation requires internal integer-class encoding (see \"Fitted parameters\" above).\nfeatures: the names of the features encountered in training, in an order consistent with the output of print_tree (see below)","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/#Accessor-functions","page":"DecisionTreeClassifier","title":"Accessor functions","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/#Examples","page":"DecisionTreeClassifier","title":"Examples","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"using MLJ\nDecisionTreeClassifier = @load DecisionTreeClassifier pkg=DecisionTree\nmodel = DecisionTreeClassifier(max_depth=3, min_samples_split=3)\n\nX, y = @load_iris\nmach = machine(model, X, y) |> fit!\n\nXnew = (sepal_length = [6.4, 7.2, 7.4],\n sepal_width = [2.8, 3.0, 2.8],\n petal_length = [5.6, 5.8, 6.1],\n petal_width = [2.1, 1.6, 1.9],)\nyhat = predict(mach, Xnew) ## probabilistic predictions\npredict_mode(mach, Xnew) ## point predictions\npdf.(yhat, \"virginica\") ## probabilities for the \"verginica\" class\n\njulia> tree = fitted_params(mach).tree\npetal_length < 2.45\n├─ setosa (50/50)\n└─ petal_width < 1.75\n ├─ petal_length < 4.95\n │ ├─ versicolor (47/48)\n │ └─ virginica (4/6)\n └─ petal_length < 4.85\n ├─ virginica (2/3)\n └─ virginica (43/43)\n\nusing Plots, TreeRecipe\nplot(tree) ## for a graphical 
representation of the tree\n\nfeature_importances(mach)","category":"page"},{"location":"models/DecisionTreeClassifier_DecisionTree/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"See also DecisionTree.jl and the unwrapped model type MLJDecisionTreeInterface.DecisionTree.DecisionTreeClassifier.","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/#DNNDetector_OutlierDetectionNeighbors","page":"DNNDetector","title":"DNNDetector","text":"","category":"section"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"DNNDetector(d = 0,\n metric = Euclidean(),\n algorithm = :kdtree,\n leafsize = 10,\n reorder = true,\n parallel = false)","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"Anomaly score based on the number of neighbors in a hypersphere of radius d. Knorr et al. [1] directly converted the resulting outlier scores to labels, thus this implementation does not fully reflect the approach from the paper.","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/#Parameters","page":"DNNDetector","title":"Parameters","text":"","category":"section"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"d::Real","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"The hypersphere radius used to calculate the global density of an instance.","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"metric::Metric","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"This is one of the Metric types defined in the Distances.jl package. It is possible to define your own metrics by creating new types that are subtypes of Metric.","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"algorithm::Symbol","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"One of (:kdtree, :balltree). In a kdtree, points are recursively split into groups using hyper-planes. Therefore a KDTree only works with axis aligned metrics which are: Euclidean, Chebyshev, Minkowski and Cityblock. A brutetree linearly searches all points in a brute force fashion and works with any Metric. A balltree recursively splits points into groups bounded by hyper-spheres and works with any Metric.","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"static::Union{Bool, Symbol}","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"One of (true, false, :auto). Whether the input data for fitting and transform should be statically or dynamically allocated. If true, the data is statically allocated. If false, the data is dynamically allocated. 
If :auto, the data is dynamically allocated if the product of all dimensions except the last is greater than 100.","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"leafsize::Int","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"Determines at what number of points to stop splitting the tree further. There is a trade-off between traversing the tree and having to evaluate the metric function for increasing number of points.","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"reorder::Bool","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"While building the tree this will put points close in distance close in memory since this helps with cache locality. In this case, a copy of the original data will be made so that the original data is left unmodified. This can have a significant impact on performance and is by default set to true.","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"parallel::Bool","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"Parallelize score and predict using all threads available. The number of threads can be set with the JULIA_NUM_THREADS environment variable. Note: fit is not parallel.","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/#Examples","page":"DNNDetector","title":"Examples","text":"","category":"section"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"using OutlierDetection: DNNDetector, fit, transform\ndetector = DNNDetector()\nX = rand(10, 100)\nmodel, result = fit(detector, X; verbosity=0)\ntest_scores = transform(detector, model, X)","category":"page"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/#References","page":"DNNDetector","title":"References","text":"","category":"section"},{"location":"models/DNNDetector_OutlierDetectionNeighbors/","page":"DNNDetector","title":"DNNDetector","text":"[1] Knorr, Edwin M.; Ng, Raymond T. 
(1998): Algorithms for Mining Distance-Based Outliers in Large Datasets.","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/#RidgeRegressor_MultivariateStats","page":"RidgeRegressor","title":"RidgeRegressor","text":"","category":"section"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"RidgeRegressor","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"A model type for constructing a ridge regressor, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"RidgeRegressor = @load RidgeRegressor pkg=MultivariateStats","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"Do model = RidgeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RidgeRegressor(lambda=...).","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"RidgeRegressor adds a quadratic penalty term to least squares regression, for regularization. Ridge regression is particularly useful in the case of multicollinearity. Options exist to specify a bias term, and to adjust the strength of the penalty term.","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/#Training-data","page":"RidgeRegressor","title":"Training data","text":"","category":"section"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"Here:","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/#Hyper-parameters","page":"RidgeRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"lambda=1.0: Is the non-negative parameter for the regularization strength. 
If lambda is 0, ridge regression is equivalent to linear least squares regression, and as lambda approaches infinity, all the linear coefficients approach 0.\nbias=true: Include the bias term if true, otherwise fit without bias term.","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/#Operations","page":"RidgeRegressor","title":"Operations","text":"","category":"section"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"predict(mach, Xnew): Return predictions of the target given new features Xnew, which should have the same scitype as X above.","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/#Fitted-parameters","page":"RidgeRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"coefficients: The linear coefficients determined by the model.\nintercept: The intercept determined by the model.","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/#Examples","page":"RidgeRegressor","title":"Examples","text":"","category":"section"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"using MLJ\n\nRidgeRegressor = @load RidgeRegressor pkg=MultivariateStats\npipe = Standardizer() |> RidgeRegressor(lambda=10)\n\nX, y = @load_boston\n\nmach = machine(pipe, X, y) |> fit!\nyhat = predict(mach, X)\ntraining_error = l1(yhat, y) |> mean","category":"page"},{"location":"models/RidgeRegressor_MultivariateStats/","page":"RidgeRegressor","title":"RidgeRegressor","text":"See also LinearRegressor, MultitargetLinearRegressor, MultitargetRidgeRegressor","category":"page"},{"location":"models/GradientBoostingClassifier_MLJScikitLearnInterface/#GradientBoostingClassifier_MLJScikitLearnInterface","page":"GradientBoostingClassifier","title":"GradientBoostingClassifier","text":"","category":"section"},{"location":"models/GradientBoostingClassifier_MLJScikitLearnInterface/","page":"GradientBoostingClassifier","title":"GradientBoostingClassifier","text":"GradientBoostingClassifier","category":"page"},{"location":"models/GradientBoostingClassifier_MLJScikitLearnInterface/","page":"GradientBoostingClassifier","title":"GradientBoostingClassifier","text":"A model type for constructing a gradient boosting classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/GradientBoostingClassifier_MLJScikitLearnInterface/","page":"GradientBoostingClassifier","title":"GradientBoostingClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/GradientBoostingClassifier_MLJScikitLearnInterface/","page":"GradientBoostingClassifier","title":"GradientBoostingClassifier","text":"GradientBoostingClassifier = @load GradientBoostingClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/GradientBoostingClassifier_MLJScikitLearnInterface/","page":"GradientBoostingClassifier","title":"GradientBoostingClassifier","text":"Do model = GradientBoostingClassifier() to construct an instance with default hyper-parameters. 
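A minimal usage sketch, assuming MLJScikitLearnInterface and its scikit-learn dependency are installed, might look like the following on the built-in iris data\nusing MLJ\nGradientBoostingClassifier = @load GradientBoostingClassifier pkg=MLJScikitLearnInterface\nmodel = GradientBoostingClassifier()\nX, y = @load_iris\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X) ## probabilistic predictions\npredict_mode(mach, X) ## point predictions\n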
Provide keyword arguments to override hyper-parameter defaults, as in GradientBoostingClassifier(loss=...).","category":"page"},{"location":"models/GradientBoostingClassifier_MLJScikitLearnInterface/","page":"GradientBoostingClassifier","title":"GradientBoostingClassifier","text":"This algorithm builds an additive model in a forward stage-wise fashion; it allows for the optimization of arbitrary differentiable loss functions. In each stage n_classes_ regression trees are fit on the negative gradient of the loss function, e.g. binary or multiclass log loss. Binary classification is a special case where only a single regression tree is induced.","category":"page"},{"location":"models/GradientBoostingClassifier_MLJScikitLearnInterface/","page":"GradientBoostingClassifier","title":"GradientBoostingClassifier","text":"HistGradientBoostingClassifier is a much faster variant of this algorithm for intermediate datasets (n_samples >= 10_000).","category":"page"},{"location":"models/SGDClassifier_MLJScikitLearnInterface/#SGDClassifier_MLJScikitLearnInterface","page":"SGDClassifier","title":"SGDClassifier","text":"","category":"section"},{"location":"models/SGDClassifier_MLJScikitLearnInterface/","page":"SGDClassifier","title":"SGDClassifier","text":"SGDClassifier","category":"page"},{"location":"models/SGDClassifier_MLJScikitLearnInterface/","page":"SGDClassifier","title":"SGDClassifier","text":"A model type for constructing a sgd classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SGDClassifier_MLJScikitLearnInterface/","page":"SGDClassifier","title":"SGDClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SGDClassifier_MLJScikitLearnInterface/","page":"SGDClassifier","title":"SGDClassifier","text":"SGDClassifier = @load SGDClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/SGDClassifier_MLJScikitLearnInterface/","page":"SGDClassifier","title":"SGDClassifier","text":"Do model = SGDClassifier() to construct an instance with default hyper-parameters. 
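A minimal usage sketch, assuming MLJScikitLearnInterface and its scikit-learn dependency are installed, might look like the following on the built-in iris data (alpha is the regularization strength listed below)\nusing MLJ\nSGDClassifier = @load SGDClassifier pkg=MLJScikitLearnInterface\nmodel = SGDClassifier(alpha=0.001)\nX, y = @load_iris\nmach = machine(model, X, y) |> fit!\nyhat = predict(mach, X) ## class predictions (with the default hinge loss this model is not probabilistic)\n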
Provide keyword arguments to override hyper-parameter defaults, as in SGDClassifier(loss=...).","category":"page"},{"location":"models/SGDClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"SGDClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SGDClassifier_MLJScikitLearnInterface/","page":"SGDClassifier","title":"SGDClassifier","text":"loss = hinge\npenalty = l2\nalpha = 0.0001\nl1_ratio = 0.15\nfit_intercept = true\nmax_iter = 1000\ntol = 0.001\nshuffle = true\nverbose = 0\nepsilon = 0.1\nn_jobs = nothing\nrandom_state = nothing\nlearning_rate = optimal\neta0 = 0.0\npower_t = 0.5\nearly_stopping = false\nvalidation_fraction = 0.1\nn_iter_no_change = 5\nclass_weight = nothing\nwarm_start = false\naverage = false","category":"page"},{"location":"models/PCA_MultivariateStats/#PCA_MultivariateStats","page":"PCA","title":"PCA","text":"","category":"section"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"PCA","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"A model type for constructing a pca, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"PCA = @load PCA pkg=MultivariateStats","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"Do model = PCA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in PCA(maxoutdim=...).","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"Principal component analysis learns a linear projection onto a lower dimensional space while preserving most of the initial variance seen in the training data.","category":"page"},{"location":"models/PCA_MultivariateStats/#Training-data","page":"PCA","title":"Training data","text":"","category":"section"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"mach = machine(model, X)","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"Here:","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/PCA_MultivariateStats/#Hyper-parameters","page":"PCA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"maxoutdim=0: Together with variance_ratio, controls the output dimension outdim chosen by the model. Specifically, suppose that k is the smallest integer such that retaining the k most significant principal components accounts for variance_ratio of the total variance in the training data. Then outdim = min(outdim, maxoutdim). 
If maxoutdim=0 (default) then the effective maxoutdim is min(n, indim - 1) where n is the number of observations and indim the number of features in the training data.\nvariance_ratio::Float64=0.99: The ratio of variance preserved after the transformation\nmethod=:auto: The method to use to solve the problem. Choices are\n:svd: Singular Value Decomposition of the matrix.\n:cov: Covariance matrix decomposition.\n:auto: Use :cov if the matrix's first dimension is smaller than its second dimension and otherwise use :svd\nmean=nothing: if nothing, centering will be computed and applied; if set to 0, no centering (data is assumed pre-centered); if a vector is passed, the centering is done with that vector.","category":"page"},{"location":"models/PCA_MultivariateStats/#Operations","page":"PCA","title":"Operations","text":"","category":"section"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.\ninverse_transform(mach, Xsmall): For a dimension-reduced table Xsmall, such as returned by transform, reconstruct a table, having the same number of columns as the original training data X, that transforms to Xsmall. Mathematically, inverse_transform is a right-inverse for the PCA projection map, whose image is orthogonal to the kernel of that map. In particular, if Xsmall = transform(mach, Xnew), then inverse_transform(Xsmall) is only an approximation to Xnew.","category":"page"},{"location":"models/PCA_MultivariateStats/#Fitted-parameters","page":"PCA","title":"Fitted parameters","text":"","category":"section"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"projection: Returns the projection matrix, which has size (indim, outdim), where indim and outdim are the number of features of the input and output respectively.","category":"page"},{"location":"models/PCA_MultivariateStats/#Report","page":"PCA","title":"Report","text":"","category":"section"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"The fields of report(mach) are:","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"indim: Dimension (number of columns) of the training data and new data to be transformed.\noutdim = min(n, indim, maxoutdim) is the output dimension; here n is the number of observations.\ntprincipalvar: Total variance of the principal components.\ntresidualvar: Total residual variance.\ntvar: Total observation variance (principal + residual variance).\nmean: The mean of the untransformed training data, of length indim.\nprincipalvars: The variance of the principal components. An AbstractVector of length outdim.\nloadings: The model's loadings, weights for each variable used when calculating principal components. 
A matrix of size (indim, outdim) where indim and outdim are as defined above.","category":"page"},{"location":"models/PCA_MultivariateStats/#Examples","page":"PCA","title":"Examples","text":"","category":"section"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"using MLJ\n\nPCA = @load PCA pkg=MultivariateStats\n\nX, y = @load_iris ## a table and a vector\n\nmodel = PCA(maxoutdim=2)\nmach = machine(model, X) |> fit!\n\nXproj = transform(mach, X)","category":"page"},{"location":"models/PCA_MultivariateStats/","page":"PCA","title":"PCA","text":"See also KernelPCA, ICA, FactorAnalysis, PPCA","category":"page"},{"location":"models/ENNUndersampler_Imbalance/#ENNUndersampler_Imbalance","page":"ENNUndersampler","title":"ENNUndersampler","text":"","category":"section"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"Initiate a ENN undersampling model with the given hyper-parameters.","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"ENNUndersampler","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"A model type for constructing a enn undersampler, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"ENNUndersampler = @load ENNUndersampler pkg=Imbalance","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"Do model = ENNUndersampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ENNUndersampler(k=...).","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"ENNUndersampler undersamples a dataset by removing (\"cleaning\") points that violate a certain condition such as having a different class compared to the majority of the neighbors as proposed in Dennis L Wilson. Asymptotic properties of nearest neighbor rules using edited data. IEEE Transactions on Systems, Man, and Cybernetics, pages 408–421, 1972.","category":"page"},{"location":"models/ENNUndersampler_Imbalance/#Training-data","page":"ENNUndersampler","title":"Training data","text":"","category":"section"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"In MLJ or MLJBase, wrap the model in a machine by \tmach = machine(model)","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"Likewise, there is no need to fit!(mach). 
","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"For default values of the hyper-parameters, model can be constructed by \tmodel = ENNUndersampler()","category":"page"},{"location":"models/ENNUndersampler_Imbalance/#Hyperparameters","page":"ENNUndersampler","title":"Hyperparameters","text":"","category":"section"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"k::Integer=5: Number of nearest neighbors to consider in the algorithm. Should be within the range 0 < k < n where n is the number of observations in the smallest class. It will be automatically set to m-1 for any class with m points where m ≤ k.\nkeep_condition::AbstractString=\"mode\": The condition that leads to cleaning a point upon violation. Takes one of \"exists\", \"mode\", \"only mode\" and \"all\"","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"- `\"exists\"`: the point has at least one neighbor from the same class\n- `\"mode\"`: the class of the point is one of the most frequent classes of the neighbors (there may be many)\n- `\"only mode\"`: the class of the point is the single most frequent class of the neighbors\n- `\"all\"`: the class of the point is the same as all the neighbors","category":"page"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"min_ratios=1.0: A parameter that controls the maximum amount of undersampling to be done for each class. If this algorithm cleans the data to an extent that this is violated, some of the cleaned points will be revived randomly so that it is satisfied.\nCan be a float and in this case each class will be at most undersampled to the size of the minority class times the float. By default, all classes are undersampled to the size of the minority class\nCan be a dictionary mapping each class label to the float minimum ratio for that class\nforce_min_ratios=false: If true, and this algorithm cleans the data such that the ratios for each class exceed those specified in min_ratios then further undersampling will be perform so that the final ratios are equal to min_ratios.\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister`.\ntry_preserve_type::Bool=true: When true, the function will try to not change the type of the input table (e.g., DataFrame). However, for some tables, this may not succeed, and in this case, the table returned will be a column table (named-tuple of vectors). 
This parameter is ignored if the input is a matrix.","category":"page"},{"location":"models/ENNUndersampler_Imbalance/#Transform-Inputs","page":"ENNUndersampler","title":"Transform Inputs","text":"","category":"section"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"X: A matrix or table of floats where each row is an observation from the dataset\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/ENNUndersampler_Imbalance/#Transform-Outputs","page":"ENNUndersampler","title":"Transform Outputs","text":"","category":"section"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"X_under: A matrix or table that includes the data after undersampling depending on whether the input X is a matrix or table respectively\ny_under: An abstract vector of labels corresponding to X_under","category":"page"},{"location":"models/ENNUndersampler_Imbalance/#Operations","page":"ENNUndersampler","title":"Operations","text":"","category":"section"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"transform(mach, X, y): resample the data X and y using ENNUndersampler, returning the undersampled versions","category":"page"},{"location":"models/ENNUndersampler_Imbalance/#Example","page":"ENNUndersampler","title":"Example","text":"","category":"section"},{"location":"models/ENNUndersampler_Imbalance/","page":"ENNUndersampler","title":"ENNUndersampler","text":"using MLJ\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows, num_continuous_feats = 100, 5\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n min_sep=0.01, stds=[3.0 3.0 3.0], class_probs, rng=42) \n\njulia> Imbalance.checkbalance(y; ref=\"minority\")\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (173.7%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (252.6%) \n\n## load ENN model type:\nENNUndersampler = @load ENNUndersampler pkg=Imbalance\n\n## underample the majority classes to sizes relative to the minority class:\nundersampler = ENNUndersampler(min_ratios=0.5, rng=42)\nmach = machine(undersampler)\nX_under, y_under = transform(mach, X, y)\n\njulia> Imbalance.checkbalance(y_under; ref=\"minority\")\n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 10 (100.0%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 10 (100.0%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 24 (240.0%) ","category":"page"},{"location":"acceleration_and_parallelism/#Acceleration-and-Parallelism","page":"Acceleration and Parallelism","title":"Acceleration and Parallelism","text":"","category":"section"},{"location":"acceleration_and_parallelism/#User-facing-interface","page":"Acceleration and Parallelism","title":"User-facing interface","text":"","category":"section"},{"location":"acceleration_and_parallelism/","page":"Acceleration and Parallelism","title":"Acceleration and Parallelism","text":"To enable composable, extensible acceleration of core MLJ methods, ComputationalResources.jl is utilized to provide some basic types and functions to make implementing acceleration easy. 
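For instance, the following sketch (an illustration only, assuming DecisionTree.jl is installed; the model, data and measure are not prescribed here) runs 6-fold cross-validation with the folds distributed over Julia threads rather than computed serially:\n\nusing MLJ\nTree = @load DecisionTreeClassifier pkg=DecisionTree verbosity=0\nX, y = @load_iris\n## pass a ComputationalResources resource via the acceleration keyword;\n## the default CPU1() would evaluate the folds serially instead\nevaluate(Tree(), X, y, resampling=CV(nfolds=6), measure=log_loss, acceleration=CPUThreads())\n\n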
However, ambitious users or package authors have the option to define their own types to be passed as resources to acceleration, which must be <:ComputationalResources.AbstractResource.","category":"page"},{"location":"acceleration_and_parallelism/","page":"Acceleration and Parallelism","title":"Acceleration and Parallelism","text":"Methods which support some form of acceleration support the acceleration keyword argument, which can be passed a \"resource\" from ComputationalResources. For example, passing acceleration=CPUProcesses() will utilize Distributed's multiprocessing functionality to accelerate the computation, while acceleration=CPUThreads() will use Julia's PARTR threading model to perform acceleration.","category":"page"},{"location":"acceleration_and_parallelism/","page":"Acceleration and Parallelism","title":"Acceleration and Parallelism","text":"The default computational resource is CPU1(), which is simply serial processing via CPU. The default resource can be changed as in this example: MLJ.default_resource(CPUProcesses()). The argument must always have type <:ComputationalResources.AbstractResource. To inspect the current default, use MLJ.default_resource().","category":"page"},{"location":"acceleration_and_parallelism/","page":"Acceleration and Parallelism","title":"Acceleration and Parallelism","text":"note: Note\nYou cannot use CPUThreads() with models wrapping Python code.","category":"page"},{"location":"models/MiniBatchKMeans_MLJScikitLearnInterface/#MiniBatchKMeans_MLJScikitLearnInterface","page":"MiniBatchKMeans","title":"MiniBatchKMeans","text":"","category":"section"},{"location":"models/MiniBatchKMeans_MLJScikitLearnInterface/","page":"MiniBatchKMeans","title":"MiniBatchKMeans","text":"MiniBatchKMeans","category":"page"},{"location":"models/MiniBatchKMeans_MLJScikitLearnInterface/","page":"MiniBatchKMeans","title":"MiniBatchKMeans","text":"A model type for constructing a Mini-Batch K-Means clustering model, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MiniBatchKMeans_MLJScikitLearnInterface/","page":"MiniBatchKMeans","title":"MiniBatchKMeans","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MiniBatchKMeans_MLJScikitLearnInterface/","page":"MiniBatchKMeans","title":"MiniBatchKMeans","text":"MiniBatchKMeans = @load MiniBatchKMeans pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/MiniBatchKMeans_MLJScikitLearnInterface/","page":"MiniBatchKMeans","title":"MiniBatchKMeans","text":"Do model = MiniBatchKMeans() to construct an instance with default hyper-parameters. 
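A minimal usage sketch follows (an illustration only, assuming the scikit-learn backend required by MLJScikitLearnInterface.jl is available; the keyword values shown are arbitrary):\n\nusing MLJ\nMiniBatchKMeans = @load MiniBatchKMeans pkg=MLJScikitLearnInterface\nX, _ = @load_iris ## features only; clustering is unsupervised\nmach = machine(MiniBatchKMeans(n_clusters=3, batch_size=50), X) |> fit!\nfitted_params(mach) ## inspect the learned parameters (for example, the cluster centers)\n\n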
Provide keyword arguments to override hyper-parameter defaults, as in MiniBatchKMeans(n_clusters=...).","category":"page"},{"location":"models/MiniBatchKMeans_MLJScikitLearnInterface/#Hyper-parameters","page":"MiniBatchKMeans","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MiniBatchKMeans_MLJScikitLearnInterface/","page":"MiniBatchKMeans","title":"MiniBatchKMeans","text":"n_clusters = 8\nmax_iter = 100\nbatch_size = 100\nverbose = 0\ncompute_labels = true\nrandom_state = nothing\ntol = 0.0\nmax_no_improvement = 10\ninit_size = nothing\nn_init = 3\ninit = k-means++\nreassignment_ratio = 0.01","category":"page"},{"location":"models/TomekUndersampler_Imbalance/#TomekUndersampler_Imbalance","page":"TomekUndersampler","title":"TomekUndersampler","text":"","category":"section"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"Initiate a Tomek undersampling model with the given hyper-parameters.","category":"page"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"TomekUndersampler","category":"page"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"A model type for constructing a Tomek undersampler, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"TomekUndersampler = @load TomekUndersampler pkg=Imbalance","category":"page"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"Do model = TomekUndersampler() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in TomekUndersampler(min_ratios=...).","category":"page"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"TomekUndersampler undersamples by removing any point that is part of a Tomek link in the data, as defined in: Ivan Tomek. Two modifications of CNN. IEEE Trans. Systems, Man and Cybernetics, 6:769–772, 1976.","category":"page"},{"location":"models/TomekUndersampler_Imbalance/#Training-data","page":"TomekUndersampler","title":"Training data","text":"","category":"section"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)","category":"page"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"Likewise, there is no need to fit!(mach). 
","category":"page"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"For default values of the hyper-parameters, model can be constructed by model = TomekUndersampler()","category":"page"},{"location":"models/TomekUndersampler_Imbalance/#Hyperparameters","page":"TomekUndersampler","title":"Hyperparameters","text":"","category":"section"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"min_ratios=1.0: A parameter that controls the maximum amount of undersampling to be done for each class. If this algorithm cleans the data to an extent that this is violated, some of the cleaned points will be revived randomly so that it is satisfied.\nCan be a float and in this case each class will be at most undersampled to the size of the minority class times the float. By default, all classes are undersampled to the size of the minority class\nCan be a dictionary mapping each class label to the float minimum ratio for that class\nforce_min_ratios=false: If true, and this algorithm cleans the data such that the ratios for each class exceed those specified in min_ratios then further undersampling will be perform so that the final ratios are equal to min_ratios.\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister`.\ntry_preserve_type::Bool=true: When true, the function will try to not change the type of the input table (e.g., DataFrame). However, for some tables, this may not succeed, and in this case, the table returned will be a column table (named-tuple of vectors). This parameter is ignored if the input is a matrix.","category":"page"},{"location":"models/TomekUndersampler_Imbalance/#Transform-Inputs","page":"TomekUndersampler","title":"Transform Inputs","text":"","category":"section"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"X: A matrix or table of floats where each row is an observation from the dataset\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/TomekUndersampler_Imbalance/#Transform-Outputs","page":"TomekUndersampler","title":"Transform Outputs","text":"","category":"section"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"X_under: A matrix or table that includes the data after undersampling depending on whether the input X is a matrix or table respectively\ny_under: An abstract vector of labels corresponding to X_under","category":"page"},{"location":"models/TomekUndersampler_Imbalance/#Operations","page":"TomekUndersampler","title":"Operations","text":"","category":"section"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"transform(mach, X, y): resample the data X and y using TomekUndersampler, returning both the new and original observations","category":"page"},{"location":"models/TomekUndersampler_Imbalance/#Example","page":"TomekUndersampler","title":"Example","text":"","category":"section"},{"location":"models/TomekUndersampler_Imbalance/","page":"TomekUndersampler","title":"TomekUndersampler","text":"using MLJ\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows, num_continuous_feats = 100, 5\n## generate a 
table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n min_sep=0.01, stds=[3.0 3.0 3.0], class_probs, rng=42) \n\njulia> Imbalance.checkbalance(y; ref=\"minority\")\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (173.7%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (252.6%) \n\n## load TomekUndersampler model type:\nTomekUndersampler = @load TomekUndersampler pkg=Imbalance\n\n## Underample the majority classes to sizes relative to the minority class:\ntomek_undersampler = TomekUndersampler(min_ratios=1.0, rng=42)\nmach = machine(tomek_undersampler)\nX_under, y_under = transform(mach, X, y)\n\njulia> Imbalance.checkbalance(y_under; ref=\"minority\")\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (100.0%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 22 (115.8%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 36 (189.5%)","category":"page"},{"location":"models/OneRuleClassifier_OneRule/#OneRuleClassifier_OneRule","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"","category":"section"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"OneRuleClassifier","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"A model type for constructing a one rule classifier, based on OneRule.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"OneRuleClassifier = @load OneRuleClassifier pkg=OneRule","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"Do model = OneRuleClassifier() to construct an instance with default hyper-parameters. ","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"OneRuleClassifier implements the OneRule method for classification by Robert Holte (\"Very simple classification rules perform well on most commonly used datasets\" in: Machine Learning 11.1 (1993), pp. 63-90). ","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"For more information see:\n\n- Witten, Ian H., Eibe Frank, and Mark A. Hall. \n Data Mining Practical Machine Learning Tools and Techniques Third Edition. \n Morgan Kaufmann, 2017, pp. 
93-96.\n- [Machine Learning - (One|Simple) Rule](https://datacadamia.com/data_mining/one_rule)\n- [OneRClassifier - One Rule for Classification](http://rasbt.github.io/mlxtend/user_guide/classifier/OneRClassifier/)","category":"page"},{"location":"models/OneRuleClassifier_OneRule/#Training-data","page":"OneRuleClassifier","title":"Training data","text":"","category":"section"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Multiclass, OrderedFactor, or <:Finite; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/OneRuleClassifier_OneRule/#Hyper-parameters","page":"OneRuleClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"This classifier has no hyper-parameters.","category":"page"},{"location":"models/OneRuleClassifier_OneRule/#Operations","page":"OneRuleClassifier","title":"Operations","text":"","category":"section"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"predict(mach, Xnew): return (deterministic) predictions of the target given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/OneRuleClassifier_OneRule/#Fitted-parameters","page":"OneRuleClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"tree: the tree (a OneTree) returned by the core OneTree.jl algorithm\nall_classes: all classes (i.e. 
levels) of the target (used also internally to transfer levels-information to predict)","category":"page"},{"location":"models/OneRuleClassifier_OneRule/#Report","page":"OneRuleClassifier","title":"Report","text":"","category":"section"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"The fields of report(mach) are:","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"tree: The OneTree created based on the training data\nnrules: The number of rules the tree contains\nerror_rate: fraction of wrongly classified instances\nerror_count: number of wrongly classified instances\nclasses_seen: list of target classes actually observed in training\nfeatures: the names of the features encountered in training","category":"page"},{"location":"models/OneRuleClassifier_OneRule/#Examples","page":"OneRuleClassifier","title":"Examples","text":"","category":"section"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"using MLJ\n\nORClassifier = @load OneRuleClassifier pkg=OneRule\n\norc = ORClassifier()\n\noutlook = [\"sunny\", \"sunny\", \"overcast\", \"rainy\", \"rainy\", \"rainy\", \"overcast\", \"sunny\", \"sunny\", \"rainy\", \"sunny\", \"overcast\", \"overcast\", \"rainy\"]\ntemperature = [\"hot\", \"hot\", \"hot\", \"mild\", \"cool\", \"cool\", \"cool\", \"mild\", \"cool\", \"mild\", \"mild\", \"mild\", \"hot\", \"mild\"]\nhumidity = [\"high\", \"high\", \"high\", \"high\", \"normal\", \"normal\", \"normal\", \"high\", \"normal\", \"normal\", \"normal\", \"high\", \"normal\", \"high\"]\nwindy = [\"false\", \"true\", \"false\", \"false\", \"false\", \"true\", \"true\", \"false\", \"false\", \"false\", \"true\", \"true\", \"false\", \"true\"]\n\nweather_data = (outlook = outlook, temperature = temperature, humidity = humidity, windy = windy)\nplay_data = [\"no\", \"no\", \"yes\", \"yes\", \"yes\", \"no\", \"yes\", \"no\", \"yes\", \"yes\", \"yes\", \"yes\", \"yes\", \"no\"]\n\nweather = coerce(weather_data, Textual => Multiclass)\nplay = coerce(play_data, Multiclass)\n\nmach = machine(orc, weather, play)\nfit!(mach)\n\nyhat = MLJ.predict(mach, weather) ## in a real context 'new' `weather` data would be used\none_tree = fitted_params(mach).tree\nreport(mach).error_rate","category":"page"},{"location":"models/OneRuleClassifier_OneRule/","page":"OneRuleClassifier","title":"OneRuleClassifier","text":"See also OneRule.jl.","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/#MultinomialNBClassifier_NaiveBayes","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"","category":"section"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"MultinomialNBClassifier","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"A model type for constructing a multinomial naive Bayes classifier, based on NaiveBayes.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"From MLJ, the type can be imported 
using","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"MultinomialNBClassifier = @load MultinomialNBClassifier pkg=NaiveBayes","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"Do model = MultinomialNBClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultinomialNBClassifier(alpha=...).","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"The multinomial naive Bayes classifier is often applied when input features consist of a counts (scitype Count) and when observations for a fixed target class are generated from a multinomial distribution with fixed probability vector, but whose sample length varies from observation to observation. For example, features might represent word counts in text documents being classified by sentiment.","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/#Training-data","page":"MultinomialNBClassifier","title":"Training data","text":"","category":"section"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"Here:","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Count; check the column scitypes with schema(X).\ny is the target, which can be any AbstractVector whose element scitype is Finite; check the scitype with schema(y).","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/#Hyper-parameters","page":"MultinomialNBClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"alpha=1: Lindstone smoothing in estimation of multinomial probability vectors from training histograms (default corresponds to Laplacian smoothing).","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/#Operations","page":"MultinomialNBClassifier","title":"Operations","text":"","category":"section"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above.\npredict_mode(mach, Xnew): Return the mode of above predictions.","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/#Fitted-parameters","page":"MultinomialNBClassifier","title":"Fitted 
parameters","text":"","category":"section"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"c_counts: A dictionary containing the observed count of each input class.\nx_counts: A dictionary containing the categorical counts of each input class.\nx_totals: The sum of each count (input feature), ungrouped.\nn_obs: The total number of observations in the training data.","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/#Examples","page":"MultinomialNBClassifier","title":"Examples","text":"","category":"section"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"using MLJ\nimport TextAnalysis\n\nCountTransformer = @load CountTransformer pkg=MLJText\nMultinomialNBClassifier = @load MultinomialNBClassifier pkg=NaiveBayes\n\ntokenized_docs = TextAnalysis.tokenize.([\n \"I am very mad. You never listen.\",\n \"You seem to be having trouble? Can I help you?\",\n \"Our boss is mad at me. I hope he dies.\",\n \"His boss wants to help me. She is nice.\",\n \"Thank you for your help. It is nice working with you.\",\n \"Never do that again! I am so mad. \",\n])\n\nsentiment = [\n \"negative\",\n \"positive\",\n \"negative\",\n \"positive\",\n \"positive\",\n \"negative\",\n]\n\nmach1 = machine(CountTransformer(), tokenized_docs) |> fit!\n\n## matrix of counts:\nX = transform(mach1, tokenized_docs)\n\n## to ensure scitype(y) <: AbstractVector{<:OrderedFactor}:\ny = coerce(sentiment, OrderedFactor)\n\nclassifier = MultinomialNBClassifier()\nmach2 = machine(classifier, X, y)\nfit!(mach2, rows=1:4)\n\n## probabilistic predictions:\ny_prob = predict(mach2, rows=5:6) ## distributions\npdf.(y_prob, \"positive\") ## probabilities for \"positive\"\nlog_loss(y_prob, y[5:6])\n\n## point predictions:\nyhat = mode.(y_prob) ## or `predict_mode(mach2, rows=5:6)`","category":"page"},{"location":"models/MultinomialNBClassifier_NaiveBayes/","page":"MultinomialNBClassifier","title":"MultinomialNBClassifier","text":"See also GaussianNBClassifier","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/#ProbabilisticNuSVC_LIBSVM","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"","category":"section"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"ProbabilisticNuSVC","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"A model type for constructing a probabilistic ν-support vector classifier, based on LIBSVM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"ProbabilisticNuSVC = @load ProbabilisticNuSVC pkg=LIBSVM","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"Do model = ProbabilisticNuSVC() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in ProbabilisticNuSVC(kernel=...).","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"This model is identical to NuSVC with the exception that it predicts probabilities, instead of actual class labels. Probabilities are computed using Platt scaling, which will add to total computation time.","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): \"LIBSVM: a library for support vector machines.\" ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf. ","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"Platt, John (1999): \"Probabilistic Outputs for Support Vector Machines and Comparisons to Regularized Likelihood Methods.\"","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/#Training-data","page":"ProbabilisticNuSVC","title":"Training data","text":"","category":"section"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"In MLJ or MLJBase, bind an instance model to data with:","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"where","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/#Hyper-parameters","page":"ProbabilisticNuSVC","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see \"Examples\" below).\nLIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2\nLIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree\nLIBSVM.Kernel.RadialBasis: (x1, x2) -> exp(-gamma*norm(x1 - x2)^2)\nLIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)\nHere gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue 91\ngamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. 
Actual value used appears in the report (see below).\ncoef0 = 0.0: kernel parameter (see above)\ndegree::Int32 = Int32(3): degree in polynomial kernel (see above)\nnu=0.5 (range (0, 1]): An upper bound on the fraction of margin errors and a lower bound of the fraction of support vectors. Denoted ν in the cited paper. Changing nu changes the thickness of the margin (a neighborhood of the decision surface) and a margin error is said to have occurred if a training observation lies on the wrong side of the surface or within the margin.\ncachesize=200.0: cache memory size in MB\ntolerance=0.001: tolerance for the stopping criterion\nshrinking=true: whether to use shrinking heuristics","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/#Operations","page":"ProbabilisticNuSVC","title":"Operations","text":"","category":"section"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above.","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/#Fitted-parameters","page":"ProbabilisticNuSVC","title":"Fitted parameters","text":"","category":"section"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"libsvm_model: the trained model object created by the LIBSVM.jl package\nencoding: class encoding used internally by libsvm_model - a dictionary of class labels keyed on the internal integer representation","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/#Report","page":"ProbabilisticNuSVC","title":"Report","text":"","category":"section"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"The fields of report(mach) are:","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"gamma: actual value of the kernel parameter gamma used in training","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/#Examples","page":"ProbabilisticNuSVC","title":"Examples","text":"","category":"section"},{"location":"models/ProbabilisticNuSVC_LIBSVM/#Using-a-built-in-kernel","page":"ProbabilisticNuSVC","title":"Using a built-in kernel","text":"","category":"section"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"using MLJ\nimport LIBSVM\n\nProbabilisticNuSVC = @load ProbabilisticNuSVC pkg=LIBSVM ## model type\nmodel = ProbabilisticNuSVC(kernel=LIBSVM.Kernel.Polynomial) ## instance\n\nX, y = @load_iris ## table, vector\nmach = machine(model, X, y) |> fit!\n\nXnew = (sepal_length = [6.4, 7.2, 7.4],\n sepal_width = [2.8, 3.0, 2.8],\n petal_length = [5.6, 5.8, 6.1],\n petal_width = [2.1, 1.6, 1.9],)\n\njulia> probs = predict(mach, Xnew)\n3-element UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:\n UnivariateFinite{Multiclass{3}}(setosa=>0.00313, versicolor=>0.0247, virginica=>0.972)\n UnivariateFinite{Multiclass{3}}(setosa=>0.000598, versicolor=>0.0155, virginica=>0.984)\n UnivariateFinite{Multiclass{3}}(setosa=>2.27e-6, versicolor=>2.73e-6, virginica=>1.0)\n\njulia> yhat = mode.(probs)\n3-element CategoricalArrays.CategoricalArray{String,1,UInt32}:\n \"virginica\"\n 
\"virginica\"\n \"virginica\"","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/#User-defined-kernels","page":"ProbabilisticNuSVC","title":"User-defined kernels","text":"","category":"section"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`\nmodel = ProbabilisticNuSVC(kernel=k)\nmach = machine(model, X, y) |> fit!\n\nprobs = predict(mach, Xnew)","category":"page"},{"location":"models/ProbabilisticNuSVC_LIBSVM/","page":"ProbabilisticNuSVC","title":"ProbabilisticNuSVC","text":"See also the classifiers NuSVC, SVC, ProbabilisticSVC and LinearSVC. And see LIVSVM.jl and the original C implementation. documentation.","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/#UnivariateFillImputer_MLJModels","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"","category":"section"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"UnivariateFillImputer","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"A model type for constructing a single variable fill imputer, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"UnivariateFillImputer = @load UnivariateFillImputer pkg=MLJModels","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"Do model = UnivariateFillImputer() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in UnivariateFillImputer(continuous_fill=...).","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"Use this model to impute missing values in a vector with a fixed value learned from the non-missing values of the training vector.","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"For imputing missing values in tabular data, use FillImputer instead.","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/#Training-data","page":"UnivariateFillImputer","title":"Training data","text":"","category":"section"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"mach = machine(model, x)","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"where","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"x: any abstract vector with element scitype Union{Missing, T} where T is a subtype of Continuous, Multiclass, OrderedFactor or Count; check scitype using scitype(x)","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/#Hyper-parameters","page":"UnivariateFillImputer","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"continuous_fill: function or other callable to determine value to be imputed in the case of Continuous (abstract float) data; default is to apply median after skipping missing values\ncount_fill: function or other callable to determine value to be imputed in the case of Count (integer) data; default is to apply rounded median after skipping missing values\nfinite_fill: function or other callable to determine value to be imputed in the case of Multiclass or OrderedFactor data (categorical vectors); default is to apply mode after skipping missing values","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/#Operations","page":"UnivariateFillImputer","title":"Operations","text":"","category":"section"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"transform(mach, xnew): return xnew with missing values imputed with the fill values learned when fitting mach","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/#Fitted-parameters","page":"UnivariateFillImputer","title":"Fitted parameters","text":"","category":"section"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"filler: the 
fill value to be imputed in all new data","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/#Examples","page":"UnivariateFillImputer","title":"Examples","text":"","category":"section"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"using MLJ\nimputer = UnivariateFillImputer()\n\nx_continuous = [1.0, 2.0, missing, 3.0]\nx_multiclass = coerce([\"y\", \"n\", \"y\", missing, \"y\"], Multiclass)\nx_count = [1, 1, 1, 2, missing, 3, 3]\n\nmach = machine(imputer, x_continuous)\nfit!(mach)\n\njulia> fitted_params(mach)\n(filler = 2.0,)\n\njulia> transform(mach, [missing, missing, 101.0])\n3-element Vector{Float64}:\n 2.0\n 2.0\n 101.0\n\nmach2 = machine(imputer, x_multiclass) |> fit!\n\njulia> transform(mach2, x_multiclass)\n5-element CategoricalArray{String,1,UInt32}:\n \"y\"\n \"n\"\n \"y\"\n \"y\"\n \"y\"\n\nmach3 = machine(imputer, x_count) |> fit!\n\njulia> transform(mach3, [missing, missing, 5])\n3-element Vector{Int64}:\n 2\n 2\n 5","category":"page"},{"location":"models/UnivariateFillImputer_MLJModels/","page":"UnivariateFillImputer","title":"UnivariateFillImputer","text":"For imputing tabular data, use FillImputer.","category":"page"},{"location":"models/RandomForestImputer_BetaML/#RandomForestImputer_BetaML","page":"RandomForestImputer","title":"RandomForestImputer","text":"","category":"section"},{"location":"models/RandomForestImputer_BetaML/","page":"RandomForestImputer","title":"RandomForestImputer","text":"mutable struct RandomForestImputer <: MLJModelInterface.Unsupervised","category":"page"},{"location":"models/RandomForestImputer_BetaML/","page":"RandomForestImputer","title":"RandomForestImputer","text":"Impute missing values using Random Forests, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/RandomForestImputer_BetaML/#Hyperparameters:","page":"RandomForestImputer","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/RandomForestImputer_BetaML/","page":"RandomForestImputer","title":"RandomForestImputer","text":"n_trees::Int64: Number of (decision) trees in the forest [def: 30]\nmax_depth::Union{Nothing, Int64}: The maximum depth the tree is allowed to reach. When this is reached the node is forced to become a leaf [def: nothing, i.e. no limits]\nmin_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]\nmin_records::Int64: The minimum number of records a node must hold to consider a partition of it [def: 2]\nmax_features::Union{Nothing, Int64}: The maximum number of (random) features to consider at each partitioning [def: nothing, i.e. square root of the data dimension]\nforced_categorical_cols::Vector{Int64}: Specify the positions of the integer columns to treat as categorical instead of cardinal. [Default: empty vector (all numerical cols are treated as cardinal by default and the others as categorical)]\nsplitting_criterion::Union{Nothing, Function}: Either gini, entropy or variance. This is the name of the function to be used to compute the information gain of a specific partition. This is done by measuring the difference between the \"impurity\" of the labels of the parent node with those of the two child nodes, weighted by the respective number of items. [def: nothing, i.e. gini for categorical labels (classification task) and variance for numerical labels (regression task)]. 
It can be an anonymous function.\nrecursive_passages::Int64: Define the number of times to go through the various columns to impute their data. Useful when there are data to impute on multiple columns. The order of the first passage is given by the decreasing number of missing values per column; the other passages are random [default: 1].\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]","category":"page"},{"location":"models/RandomForestImputer_BetaML/#Example:","page":"RandomForestImputer","title":"Example:","text":"","category":"section"},{"location":"models/RandomForestImputer_BetaML/","page":"RandomForestImputer","title":"RandomForestImputer","text":"julia> using MLJ\n\njulia> X = [1 10.5;1.5 missing; 1.8 8; 1.7 15; 3.2 40; missing missing; 3.3 38; missing -2.3; 5.2 -2.4] |> table ;\n\njulia> modelType = @load RandomForestImputer pkg = \"BetaML\" verbosity=0\nBetaML.Imputation.RandomForestImputer\n\njulia> model = modelType(n_trees=40)\nRandomForestImputer(\n n_trees = 40, \n max_depth = nothing, \n min_gain = 0.0, \n min_records = 2, \n max_features = nothing, \n forced_categorical_cols = Int64[], \n splitting_criterion = nothing, \n recursive_passages = 1, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X);\n\njulia> fit!(mach);\n[ Info: Training machine(RandomForestImputer(n_trees = 40, …), …).\n\njulia> X_full = transform(mach) |> MLJ.matrix\n9×2 Matrix{Float64}:\n 1.0 10.5\n 1.5 10.3909\n 1.8 8.0\n 1.7 15.0\n 3.2 40.0\n 2.88375 8.66125\n 3.3 38.0\n 3.98125 -2.3\n 5.2 -2.4","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/#BayesianSubspaceLDA_MultivariateStats","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"","category":"section"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"BayesianSubspaceLDA","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"A model type for constructing a Bayesian subspace LDA model, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"BayesianSubspaceLDA = @load BayesianSubspaceLDA pkg=MultivariateStats","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"Do model = BayesianSubspaceLDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BayesianSubspaceLDA(normalize=...).","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"The Bayesian multiclass subspace linear discriminant analysis algorithm learns a projection matrix as described in SubspaceLDA. 
The posterior class probability distribution is derived as in BayesianLDA.","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/#Training-data","page":"BayesianSubspaceLDA","title":"Training data","text":"","category":"section"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"Here:","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any AbstractVector whose element scitype is OrderedFactor or Multiclass; check the scitype with scitype(y).","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/#Hyper-parameters","page":"BayesianSubspaceLDA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"normalize=true: Option to normalize the between-class variance for the number of observations in each class, one of true or false.","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"outdim: the output dimension, automatically set to min(indim, nclasses-1) if equal to 0. If a non-zero outdim is passed, then the actual output dimension used is min(rank, outdim) where rank is the rank of the within-class covariance matrix.","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"priors::Union{Nothing, UnivariateFinite{<:Any, <:Any, <:Any, <:Real}, Dict{<:Any, <:Real}} = nothing: For use in prediction with Bayes rule. If priors = nothing then priors are estimated from the class proportions in the training data. Otherwise it requires a Dict or UnivariateFinite object specifying the classes with non-zero probabilities in the training target.","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/#Operations","page":"BayesianSubspaceLDA","title":"Operations","text":"","category":"section"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.\npredict(mach, Xnew): Return predictions of the target given features Xnew, which should have the same scitype as X above. 
Predictions are probabilistic but uncalibrated.\npredict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/#Fitted-parameters","page":"BayesianSubspaceLDA","title":"Fitted parameters","text":"","category":"section"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"classes: The classes seen during model fitting.\nprojection_matrix: The learned projection matrix, of size (indim, outdim), where indim and outdim are the input and output dimensions respectively (See Report section below).\npriors: The class priors for classification. As inferred from training target y, if not user-specified. A UnivariateFinite object with levels consistent with levels(y).","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/#Report","page":"BayesianSubspaceLDA","title":"Report","text":"","category":"section"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"The fields of report(mach) are:","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"indim: The dimension of the input space i.e the number of training features.\noutdim: The dimension of the transformed space the model is projected to.\nmean: The overall mean of the training data.\nnclasses: The number of classes directly observed in the training data (which can be less than the total number of classes in the class pool).","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"class_means: The class-specific means of the training data. A matrix of size (indim, nclasses) with the ith column being the class-mean of the ith class in classes (See fitted params section above).","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"class_weights: The weights (class counts) of each class. A vector of length nclasses with the ith element being the class weight of the ith class in classes. (See fitted params section above.)\nexplained_variance_ratio: The ratio of explained variance to total variance. 
Each dimension corresponds to an eigenvalue.","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/#Examples","page":"BayesianSubspaceLDA","title":"Examples","text":"","category":"section"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"using MLJ\n\nBayesianSubspaceLDA = @load BayesianSubspaceLDA pkg=MultivariateStats\n\nX, y = @load_iris ## a table and a vector\n\nmodel = BayesianSubspaceLDA()\nmach = machine(model, X, y) |> fit!\n\nXproj = transform(mach, X)\ny_hat = predict(mach, X)\nlabels = predict_mode(mach, X)","category":"page"},{"location":"models/BayesianSubspaceLDA_MultivariateStats/","page":"BayesianSubspaceLDA","title":"BayesianSubspaceLDA","text":"See also LDA, BayesianLDA, SubspaceLDA","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/#DecisionTreeRegressor_DecisionTree","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"DecisionTreeRegressor","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"A model type for constructing a CART decision tree regressor, based on DecisionTree.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"DecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"Do model = DecisionTreeRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in DecisionTreeRegressor(max_depth=...).","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"DecisionTreeRegressor implements the CART algorithm, originally published in Breiman, Leo; Friedman, J. H.; Olshen, R. A.; Stone, C. J. (1984): \"Classification and regression trees\". 
Monterey, CA: Wadsworth & Brooks/Cole Advanced Books & Software.","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/#Training-data","page":"DecisionTreeRegressor","title":"Training data","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"where","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/#Hyperparameters","page":"DecisionTreeRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"max_depth=-1: max depth of the decision tree (-1=any)\nmin_samples_leaf=1: min number of samples each leaf needs to have\nmin_samples_split=2: min number of samples needed for a split\nmin_purity_increase=0: min purity needed for a split\nn_subfeatures=0: number of features to select at random (0 for all)\npost_prune=false: set to true for post-fit pruning\nmerge_purity_threshold=1.0: (post-pruning) merge leaves having combined purity >= merge_purity_threshold\nfeature_importance: method to use for computing feature importances. 
One of (:impurity, :split)\nrng=Random.GLOBAL_RNG: random number generator or seed","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/#Operations","page":"DecisionTreeRegressor","title":"Operations","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"predict(mach, Xnew): return predictions of the target given new features Xnew having the same scitype as X above.","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/#Fitted-parameters","page":"DecisionTreeRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"tree: the tree or stump object returned by the core DecisionTree.jl algorithm\nfeatures: the names of the features encountered in training","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/#Report","page":"DecisionTreeRegressor","title":"Report","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"features: the names of the features encountered in training","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/#Accessor-functions","page":"DecisionTreeRegressor","title":"Accessor functions","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/#Examples","page":"DecisionTreeRegressor","title":"Examples","text":"","category":"section"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"using MLJ\nDecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree\nmodel = DecisionTreeRegressor(max_depth=3, min_samples_split=3)\n\nX, y = make_regression(100, 4; rng=123) ## synthetic data\nmach = machine(model, X, y) |> fit!\n\nXnew, _ = make_regression(3, 2; rng=123)\nyhat = predict(mach, Xnew) ## new predictions\n\njulia> fitted_params(mach).tree\nx1 < 0.2758\n├─ x2 < 0.9137\n│ ├─ x1 < -0.9582\n│ │ ├─ 0.9189256882087312 (0/12)\n│ │ └─ -0.23180616021065256 (0/38)\n│ └─ -1.6461153800037722 (0/9)\n└─ x1 < 1.062\n ├─ x2 < -0.4969\n │ ├─ -0.9330755147107384 (0/5)\n │ └─ -2.3287967825015548 (0/17)\n └─ x2 < 0.4598\n ├─ -2.931299926506291 (0/11)\n └─ -4.726518740473489 (0/8)\n\nfeature_importances(mach) ## get feature importances","category":"page"},{"location":"models/DecisionTreeRegressor_DecisionTree/","page":"DecisionTreeRegressor","title":"DecisionTreeRegressor","text":"See also DecisionTree.jl and the unwrapped model type 
MLJDecisionTreeInterface.DecisionTree.DecisionTreeRegressor.","category":"page"},{"location":"models/IForestDetector_OutlierDetectionPython/#IForestDetector_OutlierDetectionPython","page":"IForestDetector","title":"IForestDetector","text":"","category":"section"},{"location":"models/IForestDetector_OutlierDetectionPython/","page":"IForestDetector","title":"IForestDetector","text":"IForestDetector(n_estimators = 100,\n max_samples = \"auto\",\n max_features = 1.0,\n bootstrap = false,\n random_state = nothing,\n verbose = 0,\n n_jobs = 1)","category":"page"},{"location":"models/IForestDetector_OutlierDetectionPython/","page":"IForestDetector","title":"IForestDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.iforest","category":"page"},{"location":"models/RODDetector_OutlierDetectionPython/#RODDetector_OutlierDetectionPython","page":"RODDetector","title":"RODDetector","text":"","category":"section"},{"location":"models/RODDetector_OutlierDetectionPython/","page":"RODDetector","title":"RODDetector","text":"RODDetector(parallel_execution = false)","category":"page"},{"location":"models/RODDetector_OutlierDetectionPython/","page":"RODDetector","title":"RODDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.rod","category":"page"},{"location":"models/RandomForestClassifier_MLJScikitLearnInterface/#RandomForestClassifier_MLJScikitLearnInterface","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"","category":"section"},{"location":"models/RandomForestClassifier_MLJScikitLearnInterface/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"RandomForestClassifier","category":"page"},{"location":"models/RandomForestClassifier_MLJScikitLearnInterface/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"A model type for constructing a random forest classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RandomForestClassifier_MLJScikitLearnInterface/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RandomForestClassifier_MLJScikitLearnInterface/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"RandomForestClassifier = @load RandomForestClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/RandomForestClassifier_MLJScikitLearnInterface/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"Do model = RandomForestClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomForestClassifier(n_estimators=...).","category":"page"},{"location":"models/RandomForestClassifier_MLJScikitLearnInterface/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"A random forest is a meta estimator that fits a number of classifying decision trees on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting. 
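A minimal usage sketch follows; it is not part of the upstream docstring and assumes a working scikit-learn (Python) installation reachable through MLJScikitLearnInterface, plus the iris data loaded with @load_iris:","category":"page"},{"location":"models/RandomForestClassifier_MLJScikitLearnInterface/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"using MLJ\n\nRandomForestClassifier = @load RandomForestClassifier pkg=MLJScikitLearnInterface\n\nX, y = @load_iris ## a table and a vector\nmodel = RandomForestClassifier()\nmach = machine(model, X, y) |> fit!\n\nyhat = predict(mach, X) ## predictions on the training table","category":"page"},{"location":"models/RandomForestClassifier_MLJScikitLearnInterface/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"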
The sub-sample size is controlled with the max_samples parameter if bootstrap=True (default), otherwise the whole dataset is used to build each tree.","category":"page"},{"location":"about_mlj/#About-MLJ","page":"About MLJ","title":"About MLJ","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"MLJ (Machine Learning in Julia) is a toolbox written in Julia providing a common interface and meta-algorithms for selecting, tuning, evaluating, composing and comparing over 180 machine learning models written in Julia and other languages. In particular MLJ wraps a large number of scikit-learn models.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"MLJ is released under the MIT license.","category":"page"},{"location":"about_mlj/#Lightning-tour","page":"About MLJ","title":"Lightning tour","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"For help learning to use MLJ, see Learning MLJ.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"A self-contained notebook and julia script of this demonstration is also available here.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"The first code snippet below creates a new Julia environment MLJ_tour and installs just those packages needed for the tour. See Installation for more on creating a Julia environment for use with MLJ.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Julia installation instructions are here.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"using Pkg\nPkg.activate(\"MLJ_tour\", shared=true)\nPkg.add(\"MLJ\")\nPkg.add(\"MLJIteration\")\nPkg.add(\"EvoTrees\")","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"In MLJ a model is just a container for hyper-parameters, and that's all. Here we will apply several kinds of model composition before binding the resulting \"meta-model\" to data in a machine for evaluation using cross-validation.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Loading and instantiating a gradient tree-boosting model:","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"using MLJ\nBooster = @load EvoTreeRegressor # loads code defining a model type\nbooster = Booster(max_depth=2) # specify hyper-parameter at construction\nbooster.nrounds=50 # or mutate afterwards","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"This model is an example of an iterative model. 
As it stands, the number of iterations nrounds is fixed.","category":"page"},{"location":"about_mlj/#Composition-1:-Wrapping-the-model-to-make-it-\"self-iterating\"","page":"About MLJ","title":"Composition 1: Wrapping the model to make it \"self-iterating\"","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Let's create a new model that automatically learns the number of iterations, using the NumberSinceBest(3) criterion, as applied to an out-of-sample l1 loss:","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"using MLJIteration\niterated_booster = IteratedModel(model=booster,\n resampling=Holdout(fraction_train=0.8),\n controls=[Step(2), NumberSinceBest(3), NumberLimit(300)],\n measure=l1,\n retrain=true)","category":"page"},{"location":"about_mlj/#Composition-2:-Preprocess-the-input-features","page":"About MLJ","title":"Composition 2: Preprocess the input features","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Combining the model with categorical feature encoding:","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"pipe = ContinuousEncoder() |> iterated_booster","category":"page"},{"location":"about_mlj/#Composition-3:-Wrapping-the-model-to-make-it-\"self-tuning\"","page":"About MLJ","title":"Composition 3: Wrapping the model to make it \"self-tuning\"","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"First, we define a hyper-parameter range for optimization of a (nested) hyper-parameter:","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"max_depth_range = range(pipe,\n :(deterministic_iterated_model.model.max_depth),\n lower = 1,\n upper = 10)","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Now we can wrap the pipeline model in an optimization strategy to make it \"self-tuning\":","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"self_tuning_pipe = TunedModel(model=pipe,\n tuning=RandomSearch(),\n ranges = max_depth_range,\n resampling=CV(nfolds=3, rng=456),\n measure=l1,\n acceleration=CPUThreads(),\n n=50)","category":"page"},{"location":"about_mlj/#Binding-to-data-and-evaluating-performance","page":"About MLJ","title":"Binding to data and evaluating performance","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Loading a selection of features and labels from the Ames House Price dataset:","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"X, y = @load_reduced_ames;","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Evaluating the \"self-tuning\" pipeline model's performance using 5-fold cross-validation (implies multiple layers of nested resampling):","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"julia> evaluate(self_tuning_pipe, X, y,\n measures=[l1, l2],\n resampling=CV(nfolds=5, rng=123),\n acceleration=CPUThreads(),\n verbosity=2)\nPerformanceEvaluation object with these fields:\n measure, measurement, operation, per_fold,\n per_observation, fitted_params_per_fold,\n report_per_fold, train_test_pairs\nExtract:\n┌───────────────┬─────────────┬───────────┬───────────────────────────────────────────────┐\n│ measure │ measurement │ 
operation │ per_fold │\n├───────────────┼─────────────┼───────────┼───────────────────────────────────────────────┤\n│ LPLoss(p = 1) │ 17200.0 │ predict │ [16500.0, 17100.0, 16300.0, 17500.0, 18900.0] │\n│ LPLoss(p = 2) │ 6.83e8 │ predict │ [6.14e8, 6.64e8, 5.98e8, 6.37e8, 9.03e8] │\n└───────────────┴─────────────┴───────────┴───────────────────────────────────────────────┘","category":"page"},{"location":"about_mlj/#Key-goals","page":"About MLJ","title":"Key goals","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Offer a consistent way to use, compose and tune machine learning models in Julia,\nPromote the improvement of the Julia ML/Stats ecosystem by making it easier to use models from a wide range of packages,\nUnlock performance gains by exploiting Julia's support for parallelism, automatic differentiation, GPU, optimization etc.","category":"page"},{"location":"about_mlj/#Key-features","page":"About MLJ","title":"Key features","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Data agnostic, train most models on any data X supported by the Tables.jl interface (needs Tables.istable(X) == true).\nExtensive, state-of-the-art, support for model composition (pipelines, stacks and, more generally, learning networks). See more below.\nConvenient syntax to tune and evaluate (composite) models.\nConsistent interface to handle probabilistic predictions.\nExtensible tuning interface, to support a growing number of optimization strategies, and designed to play well with model composition.\nOptions to accelerate model evaluation and tuning with multithreading and/or distributed processing.","category":"page"},{"location":"about_mlj/#Model-composability","page":"About MLJ","title":"Model composability","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"The generic model composition API's provided by other toolboxes we have surveyed share one or more of the following shortcomings, which do not exist in MLJ:","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Composite models do not inherit all the behavior of ordinary models.\nComposition is limited to linear (non-branching) pipelines.\nSupervised components in a linear pipeline can only occur at the end of the pipeline.\nOnly static (unlearned) target transformations/inverse transformations are supported.\nHyper-parameters in homogeneous model ensembles cannot be coupled.\nModel stacking, with out-of-sample predictions for base learners, cannot be implemented (using the generic API alone).\nHyper-parameters and/or learned parameters of component models are not easily inspected or manipulated (by tuning algorithms, for example)\nComposite models cannot implement multiple operations, for example, both a predict and transform method (as in clustering models) or both a transform and inverse_transform method.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Some of these features are demonstrated in this notebook","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"For more information see the MLJ design paper or our detailed paper on the composition interface.","category":"page"},{"location":"about_mlj/#Getting-help-and-reporting-problems","page":"About MLJ","title":"Getting help and reporting problems","text":"","category":"section"},{"location":"about_mlj/","page":"About 
MLJ","title":"About MLJ","text":"Users are encouraged to provide feedback on their experience using MLJ and to report issues.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"For a query to have maximum exposure to maintainers and users, start a discussion thread at Julia Discourse Machine Learning and tag your issue \"mlj\". Queries can also be posted as issues, or on the #mlj channel in the Julia Slack workspace.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Bugs, suggestions, and feature requests can be posted here.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Users are also welcome to join the #mlj Julia slack channel to ask questions and make suggestions.","category":"page"},{"location":"about_mlj/#Installation","page":"About MLJ","title":"Installation","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Initially, it is recommended that MLJ and associated packages be installed in a new environment to avoid package conflicts. You can do this with","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"julia> using Pkg; Pkg.activate(\"my_MLJ_env\", shared=true)","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Installing MLJ is also done with the package manager:","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"julia> Pkg.add(\"MLJ\")","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"Optional: To test your installation, run","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"julia> Pkg.test(\"MLJ\")","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"It is important to note that MLJ is essentially a big wrapper providing unified access to model-providing packages. For this reason, you generally need to add further packages to your environment to make model-specific code available. This happens automatically when you use MLJ's interactive load command @iload, as in","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"julia> Tree = @iload DecisionTreeClassifier # load type\njulia> tree = Tree() # instance","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"where you will also be asked to choose a providing package if more than one package provides a DecisionTreeClassifier model. For more on identifying the name of an applicable model, see Model Search. For non-interactive loading of code (e.g., from a module or function) see Loading Model Code.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"It is recommended that you start with models from more mature packages such as DecisionTree.jl, ScikitLearn.jl or XGBoost.jl.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"MLJ is supported by several satellite packages (MLJTuning, MLJModelInterface, etc) which the general user is not required to install directly. 
Developers can learn more about these here.","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"See also the alternative installation instructions for Modifying Behavior.","category":"page"},{"location":"about_mlj/#Funding","page":"About MLJ","title":"Funding","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"MLJ was initially created as a Tools, Practices and Systems project at the Alan Turing Institute in 2019. Current funding is provided by a New Zealand Strategic Science Investment Fund awarded to the University of Auckland.","category":"page"},{"location":"about_mlj/#Citing-MLJ","page":"About MLJ","title":"Citing MLJ","text":"","category":"section"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"An overview of MLJ design:","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"(Image: DOI)","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"@article{Blaom2020,\n doi = {10.21105/joss.02704},\n url = {https://doi.org/10.21105/joss.02704},\n year = {2020},\n publisher = {The Open Journal},\n volume = {5},\n number = {55},\n pages = {2704},\n author = {Anthony D. Blaom and Franz Kiraly and Thibaut Lienart and Yiannis Simillides and Diego Arenas and Sebastian J. Vollmer},\n title = {{MLJ}: A Julia package for composable machine learning},\n journal = {Journal of Open Source Software}\n}","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"An in-depth view of MLJ's model composition design:","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"(Image: arXiv)","category":"page"},{"location":"about_mlj/","page":"About MLJ","title":"About MLJ","text":"@misc{blaom2020flexible,\n title={Flexible model composition in machine learning and its implementation in {MLJ}},\n author={Anthony D. Blaom and Sebastian J. Vollmer},\n year={2020},\n eprint={2012.15505},\n archivePrefix={arXiv},\n primaryClass={cs.LG}\n}","category":"page"},{"location":"models/PPCA_MultivariateStats/#PPCA_MultivariateStats","page":"PPCA","title":"PPCA","text":"","category":"section"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"PPCA","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"A model type for constructing a probabilistic PCA model, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"PPCA = @load PPCA pkg=MultivariateStats","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"Do model = PPCA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in PPCA(maxoutdim=...).","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"Probabilistic principal component analysis is a dimension-reduction algorithm which represents a constrained form of the Gaussian distribution in which the number of free parameters can be restricted while still allowing the model to capture the dominant correlations in a data set. 
It is expressed as the maximum likelihood solution of a probabilistic latent variable model. For details, see Bishop, C. M. (2006): Pattern Recognition and Machine Learning.","category":"page"},{"location":"models/PPCA_MultivariateStats/#Training-data","page":"PPCA","title":"Training data","text":"","category":"section"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"mach = machine(model, X)","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"Here:","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/PPCA_MultivariateStats/#Hyper-parameters","page":"PPCA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"maxoutdim=0: Controls the dimension (number of columns) of the output, outdim. Specifically, outdim = min(n, indim, maxoutdim), where n is the number of observations and indim the input dimension.\nmethod::Symbol=:ml: The method to use to solve the problem, one of :ml, :em, :bayes.\nmaxiter::Int=1000: The maximum number of iterations.\ntol::Real=1e-6: The convergence tolerance.\nmean::Union{Nothing, Real, Vector{Float64}}=nothing: If nothing, centering will be computed and applied; if set to 0 no centering is applied (data is assumed pre-centered); if a vector, the centering is done with that vector.","category":"page"},{"location":"models/PPCA_MultivariateStats/#Operations","page":"PPCA","title":"Operations","text":"","category":"section"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.\ninverse_transform(mach, Xsmall): For a dimension-reduced table Xsmall, such as returned by transform, reconstruct a table, having the same number of columns as the original training data X, that transforms to Xsmall. Mathematically, inverse_transform is a right-inverse for the PCA projection map, whose image is orthogonal to the kernel of that map. In particular, if Xsmall = transform(mach, Xnew), then inverse_transform(Xsmall) is only an approximation to Xnew.","category":"page"},{"location":"models/PPCA_MultivariateStats/#Fitted-parameters","page":"PPCA","title":"Fitted parameters","text":"","category":"section"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"projection: Returns the projection matrix, which has size (indim, outdim), where indim and outdim are the number of features of the input and output respectively. 
Each column of the projection matrix corresponds to a principal component.","category":"page"},{"location":"models/PPCA_MultivariateStats/#Report","page":"PPCA","title":"Report","text":"","category":"section"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"The fields of report(mach) are:","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"indim: Dimension (number of columns) of the training data and new data to be transformed.\noutdim: Dimension of transformed data.\ntvar: The variance of the components.\nloadings: The model's loadings matrix. A matrix of size (indim, outdim) where indim and outdim are as defined above.","category":"page"},{"location":"models/PPCA_MultivariateStats/#Examples","page":"PPCA","title":"Examples","text":"","category":"section"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"using MLJ\n\nPPCA = @load PPCA pkg=MultivariateStats\n\nX, y = @load_iris ## a table and a vector\n\nmodel = PPCA(maxoutdim=2)\nmach = machine(model, X) |> fit!\n\nXproj = transform(mach, X)","category":"page"},{"location":"models/PPCA_MultivariateStats/","page":"PPCA","title":"PPCA","text":"See also KernelPCA, ICA, FactorAnalysis, PCA","category":"page"},{"location":"models/BM25Transformer_MLJText/#BM25Transformer_MLJText","page":"BM25Transformer","title":"BM25Transformer","text":"","category":"section"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"BM25Transformer","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"A model type for constructing a BM25 transformer, based on MLJText.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"BM25Transformer = @load BM25Transformer pkg=MLJText","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"Do model = BM25Transformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BM25Transformer(max_doc_freq=...).","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"The transformer converts a collection of documents, tokenized or pre-parsed as bags of words/ngrams, to a matrix of Okapi BM25 document-word statistics. The BM25 scoring function uses both term frequency (TF) and inverse document frequency (IDF, defined below), as in TfidfTransformer, but additionally adjusts for the probability that a user will consider a search result relevant, based on the terms in the search query and those in each document.","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"In textbooks and implementations there is variation in the definition of IDF. Here two IDF definitions are available. The default, smoothed option provides the IDF for a term t as log((1 + n)/(1 + df(t))) + 1, where n is the total number of documents and df(t) the number of documents in which t appears. 
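For example, with n = 2 documents and a term t appearing in df(t) = 1 of them, the smoothed IDF is log(3/2) + 1. 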
Setting smooth_idf = false provides an IDF of log(n/df(t)) + 1.","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"References:","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"http://ethen8181.github.io/machine-learning/search/bm25_intro.html\nhttps://en.wikipedia.org/wiki/Okapi_BM25\nhttps://nlp.stanford.edu/IR-book/html/htmledition/okapi-bm25-a-non-binary-model-1.html","category":"page"},{"location":"models/BM25Transformer_MLJText/#Training-data","page":"BM25Transformer","title":"Training data","text":"","category":"section"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"mach = machine(model, X)","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"Here:","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"X is any vector whose elements are either tokenized documents or bags of words/ngrams. Specifically, each element is one of the following:\nA vector of abstract strings (tokens), e.g., [\"I\", \"like\", \"Sam\", \".\", \"Sam\", \"is\", \"nice\", \".\"] (scitype AbstractVector{Textual})\nA dictionary of counts, indexed on abstract strings, e.g., Dict(\"I\"=>1, \"Sam\"=>2, \"Sam is\"=>1) (scitype Multiset{Textual})\nA dictionary of counts, indexed on plain ngrams, e.g., Dict((\"I\",)=>1, (\"Sam\",)=>2, (\"I\", \"Sam\")=>1) (scitype Multiset{<:NTuple{N,Textual} where N}); here a plain ngram is a tuple of abstract strings.","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/BM25Transformer_MLJText/#Hyper-parameters","page":"BM25Transformer","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"max_doc_freq=1.0: Restricts the vocabulary that the transformer will consider. Terms that occur in > max_doc_freq documents will not be considered by the transformer. For example, if max_doc_freq is set to 0.9, terms that are in more than 90% of the documents will be removed.\nmin_doc_freq=0.0: Restricts the vocabulary that the transformer will consider. Terms that occur in < min_doc_freq documents will not be considered by the transformer. A value of 0.01 means that only terms that are at least in 1% of the documents will be included.\nκ=2: The term frequency saturation characteristic. Higher values represent slower saturation. What we mean by saturation is the degree to which a term occurring extra times adds to the overall score.\nβ=0.75: Amplifies the particular document length compared to the average length. The bigger β is, the more document length is amplified in terms of the overall score. 
The default value is 0.75, and the bounds are restricted between 0 and 1.\nsmooth_idf=true: Control which definition of IDF to use (see above).","category":"page"},{"location":"models/BM25Transformer_MLJText/#Operations","page":"BM25Transformer","title":"Operations","text":"","category":"section"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"transform(mach, Xnew): Based on the vocabulary, IDF, and mean word counts learned in training, return the matrix of BM25 scores for Xnew, a vector of the same form as X above. The matrix has size (n, p), where n = length(Xnew) and p the size of the vocabulary. Tokens/ngrams not appearing in the learned vocabulary are scored zero.","category":"page"},{"location":"models/BM25Transformer_MLJText/#Fitted-parameters","page":"BM25Transformer","title":"Fitted parameters","text":"","category":"section"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"vocab: A vector containing the string used in the transformer's vocabulary.\nidf_vector: The transformer's calculated IDF vector.\nmean_words_in_docs: The mean number of words in each document.","category":"page"},{"location":"models/BM25Transformer_MLJText/#Examples","page":"BM25Transformer","title":"Examples","text":"","category":"section"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"BM25Transformer accepts a variety of inputs. The example below transforms tokenized documents:","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"using MLJ\nimport TextAnalysis\n\nBM25Transformer = @load BM25Transformer pkg=MLJText\n\ndocs = [\"Hi my name is Sam.\", \"How are you today?\"]\nbm25_transformer = BM25Transformer()\n\njulia> tokenized_docs = TextAnalysis.tokenize.(docs)\n2-element Vector{Vector{String}}:\n [\"Hi\", \"my\", \"name\", \"is\", \"Sam\", \".\"]\n [\"How\", \"are\", \"you\", \"today\", \"?\"]\n\nmach = machine(bm25_transformer, tokenized_docs)\nfit!(mach)\n\nfitted_params(mach)\n\ntfidf_mat = transform(mach, tokenized_docs)","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"Alternatively, one can provide documents pre-parsed as ngrams counts:","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"using MLJ\nimport TextAnalysis\n\ndocs = [\"Hi my name is Sam.\", \"How are you today?\"]\ncorpus = TextAnalysis.Corpus(TextAnalysis.NGramDocument.(docs, 1, 2))\nngram_docs = TextAnalysis.ngrams.(corpus)\n\njulia> ngram_docs[1]\nDict{AbstractString, Int64} with 11 entries:\n \"is\" => 1\n \"my\" => 1\n \"name\" => 1\n \".\" => 1\n \"Hi\" => 1\n \"Sam\" => 1\n \"my name\" => 1\n \"Hi my\" => 1\n \"name is\" => 1\n \"Sam .\" => 1\n \"is Sam\" => 1\n\nbm25_transformer = BM25Transformer()\nmach = machine(bm25_transformer, ngram_docs)\nMLJ.fit!(mach)\nfitted_params(mach)\n\ntfidf_mat = transform(mach, ngram_docs)","category":"page"},{"location":"models/BM25Transformer_MLJText/","page":"BM25Transformer","title":"BM25Transformer","text":"See also TfidfTransformer, 
CountTransformer","category":"page"},{"location":"models/DeterministicConstantClassifier_MLJModels/#DeterministicConstantClassifier_MLJModels","page":"DeterministicConstantClassifier","title":"DeterministicConstantClassifier","text":"","category":"section"},{"location":"models/DeterministicConstantClassifier_MLJModels/","page":"DeterministicConstantClassifier","title":"DeterministicConstantClassifier","text":"DeterministicConstantClassifier","category":"page"},{"location":"models/DeterministicConstantClassifier_MLJModels/","page":"DeterministicConstantClassifier","title":"DeterministicConstantClassifier","text":"A model type for constructing a deterministic constant classifier, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/DeterministicConstantClassifier_MLJModels/","page":"DeterministicConstantClassifier","title":"DeterministicConstantClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/DeterministicConstantClassifier_MLJModels/","page":"DeterministicConstantClassifier","title":"DeterministicConstantClassifier","text":"DeterministicConstantClassifier = @load DeterministicConstantClassifier pkg=MLJModels","category":"page"},{"location":"models/DeterministicConstantClassifier_MLJModels/","page":"DeterministicConstantClassifier","title":"DeterministicConstantClassifier","text":"Do model = DeterministicConstantClassifier() to construct an instance with default hyper-parameters. ","category":"page"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/#RidgeRegressor_MLJScikitLearnInterface","page":"RidgeRegressor","title":"RidgeRegressor","text":"","category":"section"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/","page":"RidgeRegressor","title":"RidgeRegressor","text":"RidgeRegressor","category":"page"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/","page":"RidgeRegressor","title":"RidgeRegressor","text":"A model type for constructing a ridge regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/","page":"RidgeRegressor","title":"RidgeRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/","page":"RidgeRegressor","title":"RidgeRegressor","text":"RidgeRegressor = @load RidgeRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/","page":"RidgeRegressor","title":"RidgeRegressor","text":"Do model = RidgeRegressor() to construct an instance with default hyper-parameters. 
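A minimal usage sketch follows; it is not part of the upstream docstring and assumes a working scikit-learn (Python) installation reachable through MLJScikitLearnInterface, with synthetic data from MLJ's make_regression:","category":"page"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/","page":"RidgeRegressor","title":"RidgeRegressor","text":"using MLJ\n\nRidgeRegressor = @load RidgeRegressor pkg=MLJScikitLearnInterface\n\nX, y = make_regression(100, 3; rng=123) ## synthetic data\nmach = machine(RidgeRegressor(), X, y) |> fit!\n\nyhat = predict(mach, X) ## predictions on the training table","category":"page"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/","page":"RidgeRegressor","title":"RidgeRegressor","text":"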
Provide keyword arguments to override hyper-parameter defaults, as in RidgeRegressor(alpha=...).","category":"page"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"RidgeRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/RidgeRegressor_MLJScikitLearnInterface/","page":"RidgeRegressor","title":"RidgeRegressor","text":"alpha = 1.0\nfit_intercept = true\ncopy_X = true\nmax_iter = 1000\ntol = 0.0001\nsolver = auto\nrandom_state = nothing","category":"page"},{"location":"models/MultiTaskLassoRegressor_MLJScikitLearnInterface/#MultiTaskLassoRegressor_MLJScikitLearnInterface","page":"MultiTaskLassoRegressor","title":"MultiTaskLassoRegressor","text":"","category":"section"},{"location":"models/MultiTaskLassoRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoRegressor","title":"MultiTaskLassoRegressor","text":"MultiTaskLassoRegressor","category":"page"},{"location":"models/MultiTaskLassoRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoRegressor","title":"MultiTaskLassoRegressor","text":"A model type for constructing a multi-target lasso regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultiTaskLassoRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoRegressor","title":"MultiTaskLassoRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultiTaskLassoRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoRegressor","title":"MultiTaskLassoRegressor","text":"MultiTaskLassoRegressor = @load MultiTaskLassoRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/MultiTaskLassoRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoRegressor","title":"MultiTaskLassoRegressor","text":"Do model = MultiTaskLassoRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in MultiTaskLassoRegressor(alpha=...).","category":"page"},{"location":"models/MultiTaskLassoRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"MultiTaskLassoRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MultiTaskLassoRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoRegressor","title":"MultiTaskLassoRegressor","text":"alpha = 1.0\nfit_intercept = true\nmax_iter = 1000\ntol = 0.0001\ncopy_X = true\nrandom_state = nothing\nselection = cyclic","category":"page"},{"location":"models/BaggingClassifier_MLJScikitLearnInterface/#BaggingClassifier_MLJScikitLearnInterface","page":"BaggingClassifier","title":"BaggingClassifier","text":"","category":"section"},{"location":"models/BaggingClassifier_MLJScikitLearnInterface/","page":"BaggingClassifier","title":"BaggingClassifier","text":"BaggingClassifier","category":"page"},{"location":"models/BaggingClassifier_MLJScikitLearnInterface/","page":"BaggingClassifier","title":"BaggingClassifier","text":"A model type for constructing a bagging ensemble classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BaggingClassifier_MLJScikitLearnInterface/","page":"BaggingClassifier","title":"BaggingClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BaggingClassifier_MLJScikitLearnInterface/","page":"BaggingClassifier","title":"BaggingClassifier","text":"BaggingClassifier = @load BaggingClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/BaggingClassifier_MLJScikitLearnInterface/","page":"BaggingClassifier","title":"BaggingClassifier","text":"Do model = BaggingClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BaggingClassifier(estimator=...).","category":"page"},{"location":"models/BaggingClassifier_MLJScikitLearnInterface/","page":"BaggingClassifier","title":"BaggingClassifier","text":"A Bagging classifier is an ensemble meta-estimator that fits base classifiers each on random subsets of the original dataset and then aggregate their individual predictions (either by voting or by averaging) to form a final prediction. 
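A minimal usage sketch follows; it is not part of the upstream docstring and assumes a working scikit-learn (Python) installation reachable through MLJScikitLearnInterface, the default base estimator, and the iris data loaded with @load_iris:","category":"page"},{"location":"models/BaggingClassifier_MLJScikitLearnInterface/","page":"BaggingClassifier","title":"BaggingClassifier","text":"using MLJ\n\nBaggingClassifier = @load BaggingClassifier pkg=MLJScikitLearnInterface\n\nX, y = @load_iris ## a table and a vector\nmach = machine(BaggingClassifier(), X, y) |> fit!\n\nyhat = predict(mach, X) ## predictions on the training table","category":"page"},{"location":"models/BaggingClassifier_MLJScikitLearnInterface/","page":"BaggingClassifier","title":"BaggingClassifier","text":"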
Such a meta-estimator can typically be used as a way to reduce the variance of a black-box estimator (e.g., a decision tree), by introducing randomization into its construction procedure and then making an ensemble out of it.","category":"page"},{"location":"models/FeatureSelector_MLJModels/#FeatureSelector_MLJModels","page":"FeatureSelector","title":"FeatureSelector","text":"","category":"section"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"FeatureSelector","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"A model type for constructing a feature selector, based on MLJModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"FeatureSelector = @load FeatureSelector pkg=MLJModels","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"Do model = FeatureSelector() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in FeatureSelector(features=...).","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"Use this model to select features (columns) of a table, usually as part of a model Pipeline.","category":"page"},{"location":"models/FeatureSelector_MLJModels/#Training-data","page":"FeatureSelector","title":"Training data","text":"","category":"section"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"mach = machine(model, X)","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"where","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"X: any table of input features, where \"table\" is in the sense of Tables.jl","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/FeatureSelector_MLJModels/#Hyper-parameters","page":"FeatureSelector","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"features: one of the following, with the behavior indicated:\n[] (empty, the default): filter out all features (columns) which were not encountered in training\nnon-empty vector of feature names (symbols): keep only the specified features (ignore=false) or keep only unspecified features (ignore=true)\nfunction or other callable: keep a feature if the callable returns true on its name. 
For example, specifying FeatureSelector(features = name -> name in [:x1, :x3], ignore = true) has the same effect as FeatureSelector(features = [:x1, :x3], ignore = true), namely to select all features, with the exception of :x1 and :x3.\nignore: whether to ignore or keep specified features, as explained above","category":"page"},{"location":"models/FeatureSelector_MLJModels/#Operations","page":"FeatureSelector","title":"Operations","text":"","category":"section"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"transform(mach, Xnew): select features from the table Xnew as specified by the model, taking features seen during training into account, if relevant","category":"page"},{"location":"models/FeatureSelector_MLJModels/#Fitted-parameters","page":"FeatureSelector","title":"Fitted parameters","text":"","category":"section"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"features_to_keep: the features that will be selected","category":"page"},{"location":"models/FeatureSelector_MLJModels/#Example","page":"FeatureSelector","title":"Example","text":"","category":"section"},{"location":"models/FeatureSelector_MLJModels/","page":"FeatureSelector","title":"FeatureSelector","text":"using MLJ\n\nX = (ordinal1 = [1, 2, 3],\n ordinal2 = coerce([\"x\", \"y\", \"x\"], OrderedFactor),\n ordinal3 = [10.0, 20.0, 30.0],\n ordinal4 = [-20.0, -30.0, -40.0],\n nominal = coerce([\"Your father\", \"he\", \"is\"], Multiclass));\n\nselector = FeatureSelector(features=[:ordinal3, ], ignore=true);\n\njulia> transform(fit!(machine(selector, X)), X)\n(ordinal1 = [1, 2, 3],\n ordinal2 = CategoricalValue{Symbol,UInt32}[\"x\", \"y\", \"x\"],\n ordinal4 = [-20.0, -30.0, -40.0],\n nominal = CategoricalValue{String,UInt32}[\"Your father\", \"he\", \"is\"],)\n","category":"page"},{"location":"models/GeneralImputer_BetaML/#GeneralImputer_BetaML","page":"GeneralImputer","title":"GeneralImputer","text":"","category":"section"},{"location":"models/GeneralImputer_BetaML/","page":"GeneralImputer","title":"GeneralImputer","text":"mutable struct GeneralImputer <: MLJModelInterface.Unsupervised","category":"page"},{"location":"models/GeneralImputer_BetaML/","page":"GeneralImputer","title":"GeneralImputer","text":"Impute missing values using arbitrary learning models, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/GeneralImputer_BetaML/","page":"GeneralImputer","title":"GeneralImputer","text":"Impute missing values using a vector (one per column) of arbitrary learning models (classifiers/regressors, not necessarily from BetaML) that implement the interface m = Model([options]), train!(m,X,Y) and predict(m,X).","category":"page"},{"location":"models/GeneralImputer_BetaML/#Hyperparameters:","page":"GeneralImputer","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/GeneralImputer_BetaML/","page":"GeneralImputer","title":"GeneralImputer","text":"cols_to_impute::Union{String, Vector{Int64}}: Columns in the matrix for which to create an imputation model, i.e. to impute. It can be a vector of columns IDs (positions), or the keywords \"auto\" (default) or \"all\". With \"auto\" the model automatically detects the columns with missing data and impute only them. 
You may manually specify the columns, or use \"all\" if you want to create an imputation model for those columns during training even if all training data are non-missing, so that the trained model can later be applied to further data with possibly missing values.\nestimator::Any: An estimator model (regressor or classifier), possibly with its options (hyper-parameters), to be used to impute the various columns of the matrix. It can also be a cols_to_impute-length vector of different estimators to consider a different estimator for each column (dimension) to impute, for example when some columns are categorical (and will hence require a classifier) and some others are numerical (hence requiring a regressor). [default: nothing, i.e. use BetaML random forests, handling classification and regression jobs automatically].\nmissing_supported::Union{Bool, Vector{Bool}}: Whether the estimator(s) used to predict the missing data themselves support missing data in the training features (X). If not, when the model for a certain dimension is fitted, dimensions with missing data in the same rows as those where imputation is needed are dropped, and then only non-missing rows in the other remaining dimensions are considered. It can be a vector of boolean values to specify this property for each individual estimator or a single boolean value to apply to all the estimators [default: false]\nfit_function::Union{Function, Vector{Function}}: The function used by the estimator(s) to fit the model. It should take as first argument the model itself, as second argument a matrix representing the features, and as third argument a vector representing the labels. This parameter is mandatory for non-BetaML estimators and can be a single value or a vector (one per estimator) in case different estimator packages are used. [default: BetaML.fit!]\npredict_function::Union{Function, Vector{Function}}: The function used by the estimator(s) to predict the labels. It should take as first argument the model itself and as second argument a matrix representing the features. This parameter is mandatory for non-BetaML estimators and can be a single value or a vector (one per estimator) in case different estimator packages are used. [default: BetaML.predict]\nrecursive_passages::Int64: Defines the number of times to go through the various columns to impute their data. Useful when there are data to impute on multiple columns. The order of the first passage is given by the decreasing number of missing values per column; the other passages are random [default: 1].\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]. 
Note that this influence only the specific GeneralImputer code, the individual estimators may have their own rng (or similar) parameter.","category":"page"},{"location":"models/GeneralImputer_BetaML/#Examples-:","page":"GeneralImputer","title":"Examples :","text":"","category":"section"},{"location":"models/GeneralImputer_BetaML/","page":"GeneralImputer","title":"GeneralImputer","text":"Using BetaML models:","category":"page"},{"location":"models/GeneralImputer_BetaML/","page":"GeneralImputer","title":"GeneralImputer","text":"julia> using MLJ;\njulia> import BetaML ## The library from which to get the individual estimators to be used for each column imputation\njulia> X = [\"a\" 8.2;\n \"a\" missing;\n \"a\" 7.8;\n \"b\" 21;\n \"b\" 18;\n \"c\" -0.9;\n missing 20;\n \"c\" -1.8;\n missing -2.3;\n \"c\" -2.4] |> table ;\njulia> modelType = @load GeneralImputer pkg = \"BetaML\" verbosity=0\nBetaML.Imputation.GeneralImputer\njulia> model = modelType(estimator=BetaML.DecisionTreeEstimator(),recursive_passages=2);\njulia> mach = machine(model, X);\njulia> fit!(mach);\n[ Info: Training machine(GeneralImputer(cols_to_impute = auto, …), …).\njulia> X_full = transform(mach) |> MLJ.matrix\n10×2 Matrix{Any}:\n \"a\" 8.2\n \"a\" 8.0\n \"a\" 7.8\n \"b\" 21\n \"b\" 18\n \"c\" -0.9\n \"b\" 20\n \"c\" -1.8\n \"c\" -2.3\n \"c\" -2.4","category":"page"},{"location":"models/GeneralImputer_BetaML/","page":"GeneralImputer","title":"GeneralImputer","text":"Using third party packages (in this example DecisionTree):","category":"page"},{"location":"models/GeneralImputer_BetaML/","page":"GeneralImputer","title":"GeneralImputer","text":"julia> using MLJ;\njulia> import DecisionTree ## An example of external estimators to be used for each column imputation\njulia> X = [\"a\" 8.2;\n \"a\" missing;\n \"a\" 7.8;\n \"b\" 21;\n \"b\" 18;\n \"c\" -0.9;\n missing 20;\n \"c\" -1.8;\n missing -2.3;\n \"c\" -2.4] |> table ;\njulia> modelType = @load GeneralImputer pkg = \"BetaML\" verbosity=0\nBetaML.Imputation.GeneralImputer\njulia> model = modelType(estimator=[DecisionTree.DecisionTreeClassifier(),DecisionTree.DecisionTreeRegressor()], fit_function=DecisionTree.fit!,predict_function=DecisionTree.predict,recursive_passages=2);\njulia> mach = machine(model, X);\njulia> fit!(mach);\n[ Info: Training machine(GeneralImputer(cols_to_impute = auto, …), …).\njulia> X_full = transform(mach) |> MLJ.matrix\n10×2 Matrix{Any}:\n \"a\" 8.2\n \"a\" 7.51111\n \"a\" 7.8\n \"b\" 21\n \"b\" 18\n \"c\" -0.9\n \"b\" 20\n \"c\" -1.8\n \"c\" -2.3\n \"c\" -2.4","category":"page"},{"location":"third_party_packages/#Third-Party-Packages","page":"Third Party Packages","title":"Third Party Packages","text":"","category":"section"},{"location":"third_party_packages/","page":"Third Party Packages","title":"Third Party Packages","text":"A list of third-party packages with integration with MLJ.","category":"page"},{"location":"third_party_packages/","page":"Third Party Packages","title":"Third Party Packages","text":"Last updated December 2020.","category":"page"},{"location":"third_party_packages/","page":"Third Party Packages","title":"Third Party Packages","text":"Pull requests to update this list are very welcome. 
Otherwise, you may post an issue requesting this here.","category":"page"},{"location":"third_party_packages/#Packages-providing-models-in-the-MLJ-model-registry","page":"Third Party Packages","title":"Packages providing models in the MLJ model registry","text":"","category":"section"},{"location":"third_party_packages/","page":"Third Party Packages","title":"Third Party Packages","text":"See List of Supported Models","category":"page"},{"location":"third_party_packages/#Providing-unregistered-models:","page":"Third Party Packages","title":"Providing unregistered models:","text":"","category":"section"},{"location":"third_party_packages/","page":"Third Party Packages","title":"Third Party Packages","text":"SossMLJ.jl\nTimeSeriesClassification","category":"page"},{"location":"third_party_packages/#Packages-providing-other-kinds-of-functionality:","page":"Third Party Packages","title":"Packages providing other kinds of functionality:","text":"","category":"section"},{"location":"third_party_packages/","page":"Third Party Packages","title":"Third Party Packages","text":"MLJParticleSwarmOptimization.jl (hyper-parameter optimization strategy)\nTreeParzen.jl (hyper-parameter optimization strategy)\nShapley.jl (feature ranking / interpretation)\nShapML.jl (feature ranking / interpretation)\nFairness.jl (fairness metrics)\nOutlierDetection.jl (provides the ProbabilisticDetector wrapper and other outlier detection meta-functionality)\nConformalPrediction.jl (predictive uncertainty quantification through conformal prediction)","category":"page"},{"location":"learning_networks/#Learning-Networks","page":"Learning Networks","title":"Learning Networks","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Below is a practical guide to the MLJ implementation of learning networks, which have been described more abstractly in the article:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Anthony D. Blaom and Sebastian J. Vollmer (2020): Flexible model composition in machine learning and its implementation in MLJ. Preprint, arXiv:2012.15505.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Learning networks, an advanced but powerful MLJ feature, are \"blueprints\" for combining models in flexible ways, beyond ordinary linear pipelines and simple model ensembles. They are simple transformations of your existing workflows which can be \"exported\" to define new, re-usable composite model types (models which typically have other models as hyperparameters).","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Pipeline models (see Pipeline) and model stacks (see Stack) are both implemented internally as exported learning networks.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"note: Note\nWhile learning networks can be used for complex machine learning workflows, their main purpose is for defining new stand-alone model types, which behave just like any other model type: Instances can be evaluated, tuned, inserted into pipelines, etc. 
In serious applications, users are encouraged to export their learning networks, as explained under Exporting a learning network as a new model type below, after testing the network, using a small training dataset.","category":"page"},{"location":"learning_networks/#Learning-networks-by-example","page":"Learning Networks","title":"Learning networks by example","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Learning networks are best explained by way of example.","category":"page"},{"location":"learning_networks/#Lazy-computation","page":"Learning Networks","title":"Lazy computation","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"The core idea of a learning network is delayed or lazy computation. Instead of","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJ\nMLJ.color_off()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"X = 4\nY = 3\nZ = 2*X\nW = Y + Z\nW","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"we can do","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJ\n\nX = source(4)\nY = source(3)\nZ = 2*X\nW = Y + Z\nW()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"In the first computation X, Y, Z and W are all bound to ordinary data. In the second, they are bound to objects called nodes. The special nodes X and Y constitute \"entry points\" for data, and are called source nodes. As the terminology suggests, we can imagine these objects as part of a \"network\" (a directed acyclic graph) which can aid conceptualization (but is less useful in more complicated examples):","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"(Image: )","category":"page"},{"location":"learning_networks/#The-origin-of-a-node","page":"Learning Networks","title":"The origin of a node","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"The source nodes on which a given node depends are called the origins of the node:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"os = origins(W)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"X in os","category":"page"},{"location":"learning_networks/#Re-using-a-network","page":"Learning Networks","title":"Re-using a network","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"The advantage of lazy evaluation is that we can change data at a source node to repeat the calculation with new data. 
One way to do this (discouraged in practice) is to use rebind!:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Z()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"rebind!(X, 6) # demonstration only!\nZ()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"However, if a node has a unique origin, then one instead calls the node on the new data one would like to rebind to that origin:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"origins(Z)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Z(6)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Z(4)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"This has the advantage that you don't need to locate the origin and rebind data directly, and the unique-origin restriction turns out to be sufficient for the applications to learning we have in mind.","category":"page"},{"location":"learning_networks/#node_overloading","page":"Learning Networks","title":"Overloading functions for use on nodes","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Several built-in functions like * and + above are overloaded in MLJBase to work on nodes, as illustrated above. Others that work out-of-the-box include: MLJBase.matrix, MLJBase.table, vcat, hcat, mean, median, mode, first, last, as well as broadcasted versions of log, exp, mean, mode and median. A function like sqrt is not overloaded, so that Q = sqrt(Z) will throw an error. Instead, we do","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Q = node(z->sqrt(z), Z)\nZ()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Q()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"You can learn more about the node function under More on defining new nodes.","category":"page"},{"location":"learning_networks/#A-network-that-learns","page":"Learning Networks","title":"A network that learns","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"To incorporate learning in a network of nodes, MLJ:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Allows binding of machines to nodes instead of data\nGenerates \"operation\" nodes when calling an operation like predict or transform on a machine and node input data. 
Such nodes point to both a machine (storing learned parameters) and the node from which to fetch data for applying the operation (which, unlike the nodes seen so far, depend on learned parameters to generate output).","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"For an example of a learning network that actually learns, we first synthesize some training data X, y, and production data Xnew:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJ\nX, y = make_blobs(cluster_std=10.0, rng=123) # `X` is a table, `y` a vector\nXnew, _ = make_blobs(3) # `Xnew` is a table with the same number of columns\nnothing # hide","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"We choose a model to do some dimension reduction, and another to perform classification:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"pca = (@load PCA pkg=MultivariateStats verbosity=0)()\ntree = (@load DecisionTreeClassifier pkg=DecisionTree verbosity=0)()\nnothing # hide","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"To make our learning lazy, we wrap the training data as source nodes:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Xs = source(X)\nys = source(y)\nnothing # hide","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"And, finally, proceed as we would in an ordinary MLJ workflow, with the exception that there is no need to fit! our machines, as training will be carried out lazily later:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"mach1 = machine(pca, Xs)\nx = transform(mach1, Xs) # defines a new node because `Xs` is a node\n\nmach2 = machine(tree, x, ys)\nyhat = predict(mach2, x) # defines a new node because `x` is a node","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Note that mach1 and mach2 are not themselves nodes. They point to the nodes they need to call to get training data and they are in turn pointed to by other nodes. In fact, an interesting implementation detail is that an \"ordinary\" machine is not actually bound directly to data, but bound to data wrapped in source nodes.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"machine(pca, Xnew).args[1] # `Xnew` is ordinary data","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Before calling a node, we need to fit! the node, to trigger training of all the machines on which it depends:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"fit!(yhat) # can include the same keyword options for `fit!(::Machine, ...)`\nyhat()[1:2] # or `yhat(rows=2)`","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"This last represents the prediction on the training data, because that's what resides at our source nodes. 
However, yhat has the unique origin X (because \"training edges\" in the complete associated directed graph are excluded for this purpose). We can therefore call yhat on our production data to get the corresponding predictions:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"yhat(Xnew)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Training is smart, in the sense that mutating a hyper-parameter of some component model does not force retraining of upstream machines:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"tree.max_depth = 1\nfit!(yhat)\nyhat(Xnew)","category":"page"},{"location":"learning_networks/#Multithreaded-training","page":"Learning Networks","title":"Multithreaded training","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"A more complicated learning network may contain machines that can be trained in parallel. In that case, a call like the following may speed up training:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"tree.max_depth=2\nfit!(yhat, acceleration=CPUThreads())\nnothing # hide","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Currently, only CPU1() (default) and CPUThreads() are supported here.","category":"page"},{"location":"learning_networks/#Exporting-a-learning-network-as-a-new-model-type","page":"Learning Networks","title":"Exporting a learning network as a new model type","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Once a learning network has been tested, typically on some small dummy data set, it is ready to be exported as a new, stand-alone, re-usable model type (unattached to any data). We demonstrate the process by way of examples of increasing complexity:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Example A - Mini-pipeline\nMore on replacing models with symbols\nExample B - Multiple operations: transform and inverse transform\nExample C - Blending predictions and exposing internal network state in reports\nExample D - Multiple nodes pointing to the same machine\nExample E - Coupling component model hyper-parameters\nMore on defining new nodes\nExample F - Wrapping a model in a data-dependent tuning strategy","category":"page"},{"location":"learning_networks/#Example-A-Mini-pipeline","page":"Learning Networks","title":"Example A - Mini-pipeline","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"First we export the simple learning network defined above. 
(This is for illustration purposes; in practice using the Pipeline syntax model1 |> model2 is more convenient.)","category":"page"},{"location":"learning_networks/#Step-1-Define-a-new-model-struct","page":"Learning Networks","title":"Step 1 - Define a new model struct","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"We need a type with two fields, one for the preprocessor (pca in the network above) and one for the classifier (tree in the network above).","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"The DecisionTreeClassifier type of tree has supertype Probabilistic, because it makes probabilistic predictions, and we assume any other classifier we want to swap out will be the same.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"supertype(typeof(tree))","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"In particular, our composite model will also need Probabilistic as supertype. In fact, we must give it the intermediate supertype ProbabilisticNetworkComposite <: Probabilistic, so that we additionally flag it as an exported learning network model type:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"mutable struct CompositeA <: ProbabilisticNetworkComposite\n preprocessor\n classifier\nend","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"The common alternatives are DeterministicNetworkComposite and UnsupervisedNetworkComposite. But all options can be viewed as follows:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJBase\nNetworkComposite","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"We next make our learning network model-generic by substituting each model instance with the corresponding symbol representing a property (field) of the new model struct:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"mach1 = machine(:preprocessor, Xs) # <---- `pca` swapped out for `:preprocessor`\nx = transform(mach1, Xs)\nmach2 = machine(:classifier, x, ys) # <---- `tree` swapped out for `:classifier`\nyhat = predict(mach2, x)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Incidentally, this network can be used as before, except we must provide an instance of CompositeA in our fit! 
calls, to indicate what actual models the symbols are being substituted with:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"composite_a = CompositeA(pca, ConstantClassifier())\nfit!(yhat, composite=composite_a)\nyhat(Xnew)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"In this case :preprocessor is being substituted by pca, and :classifier by ConstantClassifier() for training.","category":"page"},{"location":"learning_networks/#Step-2-Wrap-the-learning-network-in-prefit","page":"Learning Networks","title":"Step 2 - Wrap the learning network in prefit","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Literally copy and paste the learning network above into the definition of a method called prefit, as shown below (if you have implemented your own MLJ model, you will notice this has the same signature as MLJModelInterface.fit):","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"import MLJBase\nfunction MLJBase.prefit(composite::CompositeA, verbosity, X, y)\n\n # the learning network from above:\n Xs = source(X)\n ys = source(y)\n mach1 = machine(:preprocessor, Xs)\n x = transform(mach1, Xs)\n mach2 = machine(:classifier, x, ys)\n yhat = predict(mach2, x)\n\n verbosity > 0 && @info \"I'm a noisy fellow!\"\n\n # return \"learning network interface\":\n return (; predict=yhat)\nend","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"That's it.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Generally, prefit always returns a learning network interface; see MLJBase.prefit for what this means in general. In this example, the interface dictates that calling predict(mach, Xnew) on a machine mach bound to some instance of CompositeA should internally call yhat(Xnew).","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Here's our new composite model type CompositeA in action, combining standardization with KNN classification:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJ\nX, y = @load_iris\n\nknn = (@load KNNClassifier pkg=NearestNeighborModels verbosity=0)()\ncomposite_a = CompositeA(Standardizer(), knn)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"mach = machine(composite_a, X, y) |> fit!\npredict(mach, X)[1:2]","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"report(mach).preprocessor","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"fitted_params(mach).classifier","category":"page"},{"location":"learning_networks/#More-on-replacing-models-with-symbols","page":"Learning Networks","title":"More on replacing models with symbols","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Only the first argument model in some expression machine(model, ...) can be replaced with a symbol. 
These replacements function as hooks for exposing reports and fitted parameters of component models in the report and fitted parameters of the composite model, but these replacements are not absolutely necessary. For example, instead of the line mach1 = machine(:preprocessor, Xs) in the prefit definition, we can do mach1 = machine(composite.preprocessor, Xs). However, report and fitted_params will not include items for the :preprocessor component model in that case.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"If a component model is not explicitly bound to data in a machine (for example, because it is first wrapped in TunedModel) then there are ways to explicitly expose associated fitted parameters or report items. See Example F below.","category":"page"},{"location":"learning_networks/#Example-B-Multiple-operations:-transform-and-inverse-transform","page":"Learning Networks","title":"Example B - Multiple operations: transform and inverse transform","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Here's a second mini-pipeline example composing two transformers which both implement inverse transform. We show how to implement an inverse_transform for the composite model too.","category":"page"},{"location":"learning_networks/#Step-1-Define-a-new-model-struct-2","page":"Learning Networks","title":"Step 1 - Define a new model struct","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJ\nimport MLJBase\n\nmutable struct CompositeB <: DeterministicNetworkComposite\n transformer1\n transformer2\nend","category":"page"},{"location":"learning_networks/#Step-2-Wrap-the-learning-network-in-prefit-2","page":"Learning Networks","title":"Step 2 - Wrap the learning network in prefit","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"function MLJBase.prefit(composite::CompositeB, verbosity, X)\n Xs = source(X)\n\n mach1 = machine(:transformer1, Xs)\n X1 = transform(mach1, Xs)\n mach2 = machine(:transformer2, X1)\n X2 = transform(mach2, X1)\n\n W1 = inverse_transform(mach2, Xs)\n W2 = inverse_transform(mach1, W1)\n\n # the learning network interface:\n return (; transform=X2, inverse_transform=W2)\nend","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Here's a demonstration:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"X = rand(100)\n\ncomposite_b = CompositeB(UnivariateBoxCoxTransformer(), Standardizer())\nmach = machine(composite_b, X) |> fit!\nW = transform(mach, X)\n@assert inverse_transform(mach, W) ≈ X","category":"page"},{"location":"learning_networks/#Example-C-Blending-predictions-and-exposing-internal-network-state-in-reports","page":"Learning Networks","title":"Example C - Blending predictions and exposing internal network state in reports","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"The code below defines a new composite model type CompositeC that predicts by taking the weighted average of two regressors, and additionally exposes, in the model's report, a measure of disagreement between the two models at time of training. 
In addition to the two regressors, the new model has two other fields:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"mix, controlling the weighting\nacceleration, for the mode of acceleration for training the model (e.g., CPUThreads()).","category":"page"},{"location":"learning_networks/#Step-1-Define-a-new-model-struct-3","page":"Learning Networks","title":"Step 1 - Define a new model struct","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJ\nimport MLJBase\n\nmutable struct CompositeC <: DeterministicNetworkComposite\n regressor1\n regressor2\n mix::Float64\n acceleration\nend","category":"page"},{"location":"learning_networks/#Step-2-Wrap-the-learning-network-in-prefit-3","page":"Learning Networks","title":"Step 2 - Wrap the learning network in prefit","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"function MLJBase.prefit(composite::CompositeC, verbosity, X, y)\n\n Xs = source(X)\n ys = source(y)\n\n mach1 = machine(:regressor1, Xs, ys)\n mach2 = machine(:regressor2, Xs, ys)\n\n yhat1 = predict(mach1, Xs)\n yhat2 = predict(mach2, Xs)\n\n # node to return disagreement between the regressor predictions:\n disagreement = node((y1, y2) -> l2(y1, y2) |> mean, yhat1, yhat2)\n\n # get the weighted average of the predictions of the regressors:\n λ = composite.mix\n yhat = (1 - λ)*yhat1 + λ*yhat2\n\n # the learning network interface:\n return (\n predict = yhat,\n report = (; training_disagreement=disagreement),\n acceleration = composite.acceleration,\n )\n\nend","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Here's a demonstration:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"X, y = make_regression() # a table and a vector\n\nknn = (@load KNNRegressor pkg=NearestNeighborModels verbosity=0)()\ntree = (@load DecisionTreeRegressor pkg=DecisionTree verbosity=0)()\ncomposite_c = CompositeC(knn, tree, 0.2, CPUThreads())\nmach = machine(composite_c, X, y) |> fit!\nXnew, _ = make_regression(3)\npredict(mach, Xnew)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"report(mach)","category":"page"},{"location":"learning_networks/#Example-D-Multiple-nodes-pointing-to-the-same-machine","page":"Learning Networks","title":"Example D - Multiple nodes pointing to the same machine","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"When incorporating learned target transformations (such as a standardization) in supervised learning, it is desirable to apply the inverse transformation to predictions, to return them to the original scale. This means re-using learned parameters from an earlier part of your workflow. 
This poses no problem here, as the next example demonstrates.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"The model type CompositeD defined below applies a preprocessing transformation to input data X (e.g., standardization), learns a transformation for the target y (e.g., an optimal Box-Cox transformation), predicts new target values using a regressor (e.g., Ridge regression), and then inverse-transforms those predictions to restore them to the original scale. (This represents a model we could alternatively build using the TransformedTargetModel wrapper and a Pipeline.)","category":"page"},{"location":"learning_networks/#Step-1-Define-a-new-model-struct-4","page":"Learning Networks","title":"Step 1 - Define a new model struct","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJ\nimport MLJBase\n\nmutable struct CompositeD <: DeterministicNetworkComposite\n preprocessor\n target_transformer\n regressor\n acceleration\nend","category":"page"},{"location":"learning_networks/#Step-2-Wrap-the-learning-network-in-prefit-4","page":"Learning Networks","title":"Step 2 - Wrap the learning network in prefit","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Notice that both of the nodes z and yhat in the wrapped learning network point to the same machine (learned parameters) mach2.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"function MLJBase.prefit(composite::CompositeD, verbosity, X, y)\n\n Xs = source(X)\n ys = source(y)\n\n mach1 = machine(:preprocessor, Xs)\n W = transform(mach1, Xs)\n\n mach2 = machine(:target_transformer, ys)\n z = transform(mach2, ys)\n\n mach3 =machine(:regressor, W, z)\n zhat = predict(mach3, W)\n\n yhat = inverse_transform(mach2, zhat)\n\n # the learning network interface:\n return (\n predict = yhat,\n acceleration = composite.acceleration,\n )\n\nend","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"The flow of information in the wrapped learning network is visualized below.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"(Image: )","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Here's an application of our new composite to the Boston dataset:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"X, y = @load_boston\n\nstand = Standardizer()\nbox = UnivariateBoxCoxTransformer()\nridge = (@load RidgeRegressor pkg=MultivariateStats verbosity=0)(lambda=92)\ncomposite_d = CompositeD(stand, box, ridge, CPU1())\nevaluate(composite_d, X, y, resampling=CV(nfolds=5), measure=l2, verbosity=0)","category":"page"},{"location":"learning_networks/#Example-E-Coupling-component-model-hyper-parameters","page":"Learning Networks","title":"Example E - Coupling component model hyper-parameters","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"The composite model in this example combines a clustering model used to reduce the dimension of the feature space (KMeans or KMedoids from Clustering.jl) with ridge regression, but has the following \"coupling\" 
of the hyperparameters: The amount of ridge regularization depends on the number of specified clusters k, with less regularization for a greater number of clusters. It includes a user-specified coupling coefficient c, and exposes the solver hyper-parameter of the ridge regressor. (Neither the clusterer nor ridge regressor are themselves hyperparameters of the composite.)","category":"page"},{"location":"learning_networks/#Step-1-Define-a-new-model-struct-5","page":"Learning Networks","title":"Step 1 - Define a new model struct","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJ\nimport MLJBase\n\nmutable struct CompositeE <: DeterministicNetworkComposite\n clusterer # `:kmeans` or `:kmedoids`\n k::Int # number of clusters\n solver # a ridge regression parameter we want to expose\n c::Float64 # a \"coupling\" coefficient\nend","category":"page"},{"location":"learning_networks/#Step-2-Wrap-the-learning-network-in-prefit-5","page":"Learning Networks","title":"Step 2 - Wrap the learning network in prefit","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"RidgeRegressor = @load RidgeRegressor pkg=MLJLinearModels verbosity=0\nKMeans = @load KMeans pkg=Clustering verbosity=0\nKMedoids = @load KMedoids pkg=Clustering verbosity=0\n\nfunction MLJBase.prefit(composite::CompositeE, verbosity, X, y)\n\n Xs = source(X)\n ys = source(y)\n\n k = composite.k\n solver = composite.solver\n c = composite.c\n\n clusterer = composite.clusterer == :kmeans ? KMeans(; k) : KMedoids(; k)\n mach1 = machine(clusterer, Xs)\n Xsmall = transform(mach1, Xs)\n\n # the coupling - ridge regularization depends on the number of\n # clusters `k` and the coupling coefficient `c`:\n lambda = exp(-c/k)\n\n ridge = RidgeRegressor(; lambda, solver)\n mach2 = machine(ridge, Xsmall, ys)\n yhat = predict(mach2, Xsmall)\n\n return (predict=yhat,)\nend","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Here's an application to the Boston dataset in which we optimize the coupling coefficient (see Tuning Models for more on hyper-parameter optimization):","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"X, y = @load_boston # a table and a vector\n\ncomposite_e = CompositeE(:kmeans, 3, nothing, 0.5)\nr = range(composite_e, :c, lower = -2, upper=2, scale=x->10^x)\ntuned_composite_e = TunedModel(\n composite_e,\n range=r,\n tuning=RandomSearch(rng=123),\n measure=l2,\n resampling=CV(nfolds=6),\n n=100,\n)\nmach = machine(tuned_composite_e, X, y) |> fit!\nreport(mach).best_model","category":"page"},{"location":"learning_networks/#More-on-defining-new-nodes","page":"Learning Networks","title":"More on defining new nodes","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Overloading ordinary functions for nodes has already been discussed above. 
Here's another example:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"divide(x, y) = x/y\n\nX = source(2)\nY = source(3)\n\nZ = node(divide, X, Y)\nnothing # hide","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"This means Z() returns divide(X(), Y()), which is divide(2, 3) in this case:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Z()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"We cannot call Z with arguments (e.g., Z(2)) because it does not have a unique origin.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"In all the node examples so far, the first argument of node is a function, and all other arguments are nodes - one node for each argument of the function. A node constructed in this way is called a static node. A dynamic node, which directly depends on the outcome of a training event, is constructed by giving a machine as the second argument, to be passed as the first argument of the function in a node call. For example, we can do","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Xs = source(rand(4))\nmach = machine(Standardizer(), Xs)\nN = node(transform, mach, Xs) |> fit!\nnothing # hide","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Then N has the following calling properties:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"N() returns transform(mach, Xs())\nN(Xnew) returns transform(mach, Xs(Xnew)); here Xs(Xnew) is just Xnew because Xs is just a source node.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"N()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"N(rand(2))","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"In fact, this is precisely how the transform method is internally overloaded to work, when called with a node argument (to return a node instead of data). That is, internally there exists code that amounts to the definition","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"transform(mach, X::AbstractNode) = node(transform, mach, X)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Here AbstractNode is the common super-type of Node and Source.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"It is sometimes useful to create dynamic nodes with no node arguments, as in","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Xs = source(rand(10))\nmach = machine(Standardizer(), Xs)\nN = node(fitted_params, mach) |> fit!\nN()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Static nodes can also have zero node arguments. 
These may be viewed as \"constant\" nodes:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"N = Node(()-> 42)\nN()","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Example F below demonstrates the use of static and dynamic nodes. For more details, see the node docstring.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"There is also an experimental macro @node. If Z is an AbstractNode (Z = source(16), say) then instead of","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Q = node(z->sqrt(z), Z)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"one can do","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Q = @node sqrt(Z)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"(so that Q() == 4). Here's a more complicated application of @node to row-shuffle a table:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using Random\nX = (x1 = [1, 2, 3, 4, 5],\n x2 = [:one, :two, :three, :four, :five])\nrows(X) = 1:nrows(X)\n\nXs = source(X)\nrs = @node rows(Xs)\nW = @node selectrows(Xs, @node shuffle(rs))\n\njulia> W()\n(x1 = [5, 1, 3, 2, 4],\n x2 = Symbol[:five, :one, :three, :two, :four],)\n","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Important. An argument not in global scope is assumed by @node to be a node or source.","category":"page"},{"location":"learning_networks/#Example-F-Wrapping-a-model-in-a-data-dependent-tuning-strategy","page":"Learning Networks","title":"Example F - Wrapping a model in a data-dependent tuning strategy","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"When the regularization parameter of a Lasso model is optimized, one commonly searches over a parameter range depending on properties of the training data. Indeed, Lasso (and, more generally, elastic net) implementations commonly provide a method to carry out this data-dependent optimization automatically, using cross-validation. The following example shows how to transform the LassoRegressor model type from MLJLinearModels.jl into a self-tuning model type LassoCVRegressor using the commonly implemented data-dependent tuning strategy. 
A new dimensionless hyperparameter epsilon controls the lower bound on the parameter range.","category":"page"},{"location":"learning_networks/#Step-1-Define-a-new-model-struct-6","page":"Learning Networks","title":"Step 1 - Define a new model struct","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"using MLJ\nimport MLJBase\n\nmutable struct LassoCVRegressor <: DeterministicNetworkComposite\n lasso # the atomic lasso model (`lasso.lambda` is ignored)\n epsilon::Float64 # controls lower bound of `lasso.lambda` in tuning\n resampling # resampling strategy for optimization of `lambda`\nend\n\n# keyword constructor for convenience:\nLassoRegressor = @load LassoRegressor pkg=MLJLinearModels verbosity=0\nLassoCVRegressor(;\n lasso=LassoRegressor(),\n epsilon=0.001,\n resampling=CV(nfolds=6),\n) = LassoCVRegressor(\n lasso,\n epsilon,\n resampling,\n)\nnothing # hide","category":"page"},{"location":"learning_networks/#Step-2-Wrap-the-learning-network-in-prefit-6","page":"Learning Networks","title":"Step 2 - Wrap the learning network in prefit","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"In this case, there is no model -> :symbol replacement that makes sense, because the model is getting wrapped by TunedModel before being bound to nodes in a machine. However, we can expose the learned lasso coefs and intercept using fitted parameter nodes; and expose the optimal lambda, and the range searched, using report nodes (as previously demonstrated in Example C).","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"function MLJBase.prefit(composite::LassoCVRegressor, verbosity, X, y)\n\n λ_max = maximum(abs.(MLJ.matrix(X)'y))\n\n Xs = source(X)\n ys = source(y)\n\n r = range(\n composite.lasso,\n :lambda,\n lower=composite.epsilon*λ_max,\n upper=λ_max,\n scale=:log10,\n )\n\n lambda_range = node(()->r) # a \"constant\" report node\n\n tuned_lasso = TunedModel(\n composite.lasso,\n tuning=Grid(shuffle=false),\n range = r,\n measure = l2,\n resampling=composite.resampling,\n )\n mach = machine(tuned_lasso, Xs, ys)\n\n R = node(report, mach) # `R()` returns `report(mach)`\n lambda = node(r -> r.best_model.lambda, R) # a report node\n\n F = node(fitted_params, mach) # `F()` returns `fitted_params(mach)`\n coefs = node(f->f.best_fitted_params.coefs, F) # a fitted params node\n intercept = node(f->f.best_fitted_params.intercept, F) # a fitted params node\n\n yhat = predict(mach, Xs)\n\n return (\n predict=yhat,\n fitted_params=(; coefs, intercept),\n report=(; lambda, lambda_range),\n )\n\nend","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Here's a demonstration:","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"X, _ = make_regression(1000, 3, rng=123)\ny = X.x2 - X.x2 + 0.005*X.x3 + 0.05*rand(1000)\nlasso_cv = LassoCVRegressor(epsilon=1e-5)\nmach = machine(lasso_cv, X, y) |> fit!\nreport(mach)","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"fitted_params(mach)","category":"page"},{"location":"learning_networks/#The-learning-network-API","page":"Learning Networks","title":"The learning network API","text":"","category":"section"},{"location":"learning_networks/","page":"Learning 
Networks","title":"Learning Networks","text":"Two new julia types are part of learning networks: Source and Node, which share a common abstract supertype AbstractNode.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Formally, a learning network defines two labeled directed acyclic graphs (DAG's) whose nodes are Node or Source objects, and whose labels are Machine objects. We obtain the first DAG from directed edges of the form N1 - N2 whenever N1 is an argument of N2 (see below). Only this DAG is relevant when calling a node, as discussed in the examples above and below. To form the second DAG (relevant when calling or calling fit! on a node) one adds edges for which N1 is training argument of the machine which labels N1. We call the second, larger DAG, the completed learning network (but note only edges of the smaller network are explicitly drawn in diagrams, for simplicity).","category":"page"},{"location":"learning_networks/#Source-nodes","page":"Learning Networks","title":"Source nodes","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Only source nodes can reference concrete data. A Source object has a single field, data.","category":"page"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"MLJBase.Source\nsource(X)\nrebind!\nsources\norigins","category":"page"},{"location":"learning_networks/#MLJBase.Source","page":"Learning Networks","title":"MLJBase.Source","text":"Source\n\nType for a learning network source node. Constructed using source, as in source() or source(rand(2,3)).\n\nSee also source, Node.\n\n\n\n\n\n","category":"type"},{"location":"learning_networks/#MLJBase.source-Tuple{Any}","page":"Learning Networks","title":"MLJBase.source","text":"Xs = source(X=nothing)\n\nDefine, a learning network Source object, wrapping some input data X, which can be nothing for purposes of exporting the network as stand-alone model. For training and testing the unexported network, appropriate vectors, tables, or other data containers are expected.\n\nThe calling behaviour of a Source object is this:\n\nXs() = X\nXs(rows=r) = selectrows(X, r) # eg, X[r,:] for a DataFrame\nXs(Xnew) = Xnew\n\nSee also: MLJBase.prefit, sources, origins, node.\n\n\n\n\n\n","category":"method"},{"location":"learning_networks/#MLJBase.rebind!","page":"Learning Networks","title":"MLJBase.rebind!","text":"rebind!(s, X)\n\nAttach new data X to an existing source node s. Not a public method.\n\n\n\n\n\n","category":"function"},{"location":"learning_networks/#MLJBase.sources","page":"Learning Networks","title":"MLJBase.sources","text":"sources(N::AbstractNode)\n\nA vector of all sources referenced by calls N() and fit!(N). These are the sources of the ancestor graph of N when including training edges.\n\nNot to be confused with origins(N), in which training edges are excluded.\n\nSee also: origins, source.\n\n\n\n\n\n","category":"function"},{"location":"learning_networks/#MLJBase.origins","page":"Learning Networks","title":"MLJBase.origins","text":"origins(N)\n\nReturn a list of all origins of a node N accessed by a call N(). These are the source nodes of ancestor graph of N if edges corresponding to training arguments are excluded. 
A Node object cannot be called on new data unless it has a unique origin.\n\nNot to be confused with sources(N) which refers to the same graph but without the training edge deletions.\n\nSee also: node, source.\n\n\n\n\n\n","category":"function"},{"location":"learning_networks/#Nodes","page":"Learning Networks","title":"Nodes","text":"","category":"section"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"Node\nnode","category":"page"},{"location":"learning_networks/#MLJBase.Node","page":"Learning Networks","title":"MLJBase.Node","text":"Node{T<:Union{Machine,Nothing}}\n\nType for nodes in a learning network that are not Source nodes.\n\nThe key components of a Node are:\n\nAn operation, which will either be static (a fixed function) or dynamic (such as predict or transform).\nA Machine object, on which to dispatch the operation (nothing if the operation is static). The training arguments of the machine are generally other nodes, including Source nodes.\nUpstream connections to other nodes, called its arguments, possibly including Source nodes, one for each data argument of the operation (typically there's just one).\n\nWhen a node N is called, as in N(), it applies the operation on the machine (if there is one) together with the outcome of calls to its node arguments, to compute the return value. For details on a node's calling behavior, see node.\n\nSee also node, Source, origins, sources, fit!.\n\n\n\n\n\n","category":"type"},{"location":"learning_networks/#MLJBase.node","page":"Learning Networks","title":"MLJBase.node","text":"J = node(f, mach::Machine, args...)\n\nDefines a dynamic Node object J wrapping a dynamic operation f (predict, predict_mean, transform, etc), a nodal machine mach and arguments args. Its calling behaviour, which depends on the outcome of training mach (and, implicitly, on training outcomes affecting its arguments) is this:\n\nJ() = f(mach, args[1](), args[2](), ..., args[n]())\nJ(rows=r) = f(mach, args[1](rows=r), args[2](rows=r), ..., args[n](rows=r))\nJ(X) = f(mach, args[1](X), args[2](X), ..., args[n](X))\n\nGenerally n=1 or n=2 in this latter case.\n\npredict(mach, X::AbstractNode, y::AbstractNode)\npredict_mean(mach, X::AbstractNode, y::AbstractNode)\npredict_median(mach, X::AbstractNode, y::AbstractNode)\npredict_mode(mach, X::AbstractNode, y::AbstractNode)\ntransform(mach, X::AbstractNode)\ninverse_transform(mach, X::AbstractNode)\n\nShortcuts for J = node(predict, mach, X, y), etc.\n\nCalling a node is a recursive operation which terminates in the call to a source node (or nodes). Calling nodes on new data X fails unless the number of such nodes is one.\n\nSee also: Node, @node, source, origins.\n\n\n\n\n\n","category":"function"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"@node","category":"page"},{"location":"learning_networks/#MLJBase.@node","page":"Learning Networks","title":"MLJBase.@node","text":"@node f(...)\n\nConstruct a new node that applies the function f to some combination of nodes, sources and other arguments.\n\nImportant. 
An argument not in global scope is assumed to be a node or source.\n\nExamples\n\nX = source(π)\nW = @node sin(X)\njulia> W()\n0\n\nX = source(1:10)\nY = @node selectrows(X, 3:4)\njulia> Y()\n3:4\n\njulia> Y([\"one\", \"two\", \"three\", \"four\"])\n2-element Array{Symbol,1}:\n \"three\"\n \"four\"\n\nX1 = source(4)\nX2 = source(5)\nadd(a, b, c) = a + b + c\nN = @node add(X1, 1, X2)\njulia> N()\n10\n\n\nSee also node\n\n\n\n\n\n","category":"macro"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"MLJBase.prefit","category":"page"},{"location":"learning_networks/#MLJBase.prefit","page":"Learning Networks","title":"MLJBase.prefit","text":"MLJBase.prefit(model, verbosity, data...)\n\nReturns a learning network interface (see below) for a learning network with source nodes that wrap data.\n\nA user overloads MLJBase.prefit when exporting a learning network as a new stand-alone model type, of which model above will be an instance. See the MLJ reference manual for details.\n\nA learning network interface is a named tuple declaring certain interface points in a learning network, to be used when \"exporting\" the network as a new stand-alone model type. Examples are\n\n (predict=yhat,)\n (transform=Xsmall, acceleration=CPUThreads())\n (predict=yhat, transform=W, report=(loss=loss_node,))\n\nHere yhat, Xsmall, W and loss_node are nodes in the network.\n\nThe keys of the learning network interface are always one of the following:\n\nThe name of an operation, such as :predict, :predict_mode, :transform, :inverse_transform. See \"Operation keys\" below.\n:report, for exposing results of calling a node with no arguments in the composite model report. See \"Including report nodes\" below.\n:fitted_params, for exposing results of calling a node with no arguments as fitted parameters of the composite model. See \"Including fitted parameter nodes\" below.\n:acceleration, for articulating acceleration mode for training the network, e.g., CPUThreads(). Corresponding value must be an AbstractResource. If not included, CPU1() is used.\n\nOperation keys\n\nIf the key is an operation, then the value must be a node n in the network with a unique origin (length(origins(n)) === 1). The intention of a declaration such as predict=yhat is that the exported model type implements predict, which, when applied to new data Xnew, should return yhat(Xnew).\n\nIncluding report nodes\n\nIf the key is :report, then the corresponding value must be a named tuple\n\n (k1=n1, k2=n2, ...)\n\nwhose values are all nodes. For each k=n pair, the key k will appear as a key in the composite model report, with a corresponding value of deepcopy(n()), called immediately after training or updating the network. For examples, refer to the \"Learning Networks\" section of the MLJ manual.\n\nIncluding fitted parameter nodes\n\nIf the key is :fitted_params, then the behaviour is as for report nodes but results are exposed as fitted parameters of the composite model instead of the report.\n\n\n\n\n\n","category":"function"},{"location":"learning_networks/","page":"Learning Networks","title":"Learning Networks","text":"See more on fitting nodes at fit! 
and fit_only!.","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/#MultitargetKNNClassifier_NearestNeighborModels","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"","category":"section"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"MultitargetKNNClassifier","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"A model type for constructing a multitarget K-nearest neighbor classifier, based on NearestNeighborModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"MultitargetKNNClassifier = @load MultitargetKNNClassifier pkg=NearestNeighborModels","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"Do model = MultitargetKNNClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetKNNClassifier(K=...).","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"Multi-target K-Nearest Neighbors Classifier (MultitargetKNNClassifier) is a variation of KNNClassifier that assumes the target variable is vector-valued with Multiclass or OrderedFactor components. 
(Target data must be presented as a table, however.)","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/#Training-data","page":"MultitargetKNNClassifier","title":"Training data","text":"","category":"section"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"OR","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"mach = machine(model, X, y, w)","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"Here:","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any table of responses whose element scitype is either <:Finite (<:Multiclass or <:OrderedFactor will do); check the column scitypes with schema(y). Each column of y is assumed to belong to a common categorical pool.\nw is the observation weights, which can either be nothing (default) or an AbstractVector whose element scitype is Count or Continuous. This is different from the weights kernel, which is a model hyperparameter; see below.","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/#Hyper-parameters","page":"MultitargetKNNClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"K::Int=5 : number of neighbors\nalgorithm::Symbol = :kdtree : one of (:kdtree, :brutetree, :balltree)\nmetric::Metric = Euclidean() : any Metric from Distances.jl for the distance between points. For algorithm = :kdtree only metrics which are instances of Union{Distances.Chebyshev, Distances.Cityblock, Distances.Euclidean, Distances.Minkowski, Distances.WeightedCityblock, Distances.WeightedEuclidean, Distances.WeightedMinkowski} are supported.\nleafsize::Int = 10 : determines the number of points at which to stop splitting the tree. This option is ignored and always taken as 0 for algorithm = :brutetree, since brutetree isn't actually a tree.\nreorder::Bool = true : if true then points which are close in distance are placed close in memory. In this case, a copy of the original data will be made so that the original data is left unmodified. Setting this to true can significantly improve performance of the specified algorithm (except :brutetree). 
This option is ignored and always taken as false for algorithm = :brutetree.\nweights::KNNKernel=Uniform() : kernel used in assigning weights to the k-nearest neighbors for each observation. An instance of one of the types in list_kernels(). User-defined weighting functions can be passed by wrapping the function in a UserDefinedKernel kernel (do ?NearestNeighborModels.UserDefinedKernel for more info). If observation weights w are passed during machine construction then the weight assigned to each neighbor vote is the product of the kernel-generated weight for that neighbor and the corresponding observation weight.\noutput_type::Type{<:MultiUnivariateFinite}=DictTable : One of (ColumnTable, DictTable). The type of table to use for predictions. Setting to ColumnTable might improve performance for narrow tables while setting to DictTable improves performance for wide tables.","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/#Operations","page":"MultitargetKNNClassifier","title":"Operations","text":"","category":"section"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"predict(mach, Xnew): Return predictions of the target given features Xnew, which should have the same scitype as X above. Predictions are either a ColumnTable or DictTable of UnivariateFiniteVector columns depending on the value set for the output_type parameter discussed above. The probabilistic predictions are uncalibrated.\npredict_mode(mach, Xnew): Return the modes of each column of the table of probabilistic predictions returned above.","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/#Fitted-parameters","page":"MultitargetKNNClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"tree: An instance of either KDTree, BruteTree or BallTree depending on the value of the algorithm hyperparameter (See hyper-parameters section above). 
These are data structures that store the training data and are used to speed up nearest neighbor searches on test data points.","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/#Examples","page":"MultitargetKNNClassifier","title":"Examples","text":"","category":"section"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"using MLJ, StableRNGs\n\n## set rng for reproducibility\nrng = StableRNG(10)\n\n## Dataset generation\nn, p = 10, 3\nX = table(randn(rng, n, p)) ## feature table\nfruit, color = categorical([\"apple\", \"orange\"]), categorical([\"blue\", \"green\"])\ny = [(fruit = rand(rng, fruit), color = rand(rng, color)) for _ in 1:n] ## target_table\n## Each column in y has a common categorical pool as expected\nselectcols(y, :fruit) ## categorical array\nselectcols(y, :color) ## categorical array\n\n## Load MultitargetKNNClassifier\nMultitargetKNNClassifier = @load MultitargetKNNClassifier pkg=NearestNeighborModels\n\n## view possible kernels\nNearestNeighborModels.list_kernels()\n\n## MultitargetKNNClassifier instantiation\nmodel = MultitargetKNNClassifier(K=3, weights = NearestNeighborModels.Inverse())\n\n## wrap model and required data in an MLJ machine and fit\nmach = machine(model, X, y) |> fit!\n\n## predict\ny_hat = predict(mach, X)\nlabels = predict_mode(mach, X)\n","category":"page"},{"location":"models/MultitargetKNNClassifier_NearestNeighborModels/","page":"MultitargetKNNClassifier","title":"MultitargetKNNClassifier","text":"See also KNNClassifier","category":"page"},{"location":"models/AdaBoostRegressor_MLJScikitLearnInterface/#AdaBoostRegressor_MLJScikitLearnInterface","page":"AdaBoostRegressor","title":"AdaBoostRegressor","text":"","category":"section"},{"location":"models/AdaBoostRegressor_MLJScikitLearnInterface/","page":"AdaBoostRegressor","title":"AdaBoostRegressor","text":"AdaBoostRegressor","category":"page"},{"location":"models/AdaBoostRegressor_MLJScikitLearnInterface/","page":"AdaBoostRegressor","title":"AdaBoostRegressor","text":"A model type for constructing an AdaBoost ensemble regression, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/AdaBoostRegressor_MLJScikitLearnInterface/","page":"AdaBoostRegressor","title":"AdaBoostRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/AdaBoostRegressor_MLJScikitLearnInterface/","page":"AdaBoostRegressor","title":"AdaBoostRegressor","text":"AdaBoostRegressor = @load AdaBoostRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/AdaBoostRegressor_MLJScikitLearnInterface/","page":"AdaBoostRegressor","title":"AdaBoostRegressor","text":"Do model = AdaBoostRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in AdaBoostRegressor(estimator=...).","category":"page"},{"location":"models/AdaBoostRegressor_MLJScikitLearnInterface/","page":"AdaBoostRegressor","title":"AdaBoostRegressor","text":"An AdaBoost regressor is a meta-estimator that begins by fitting a regressor on the original dataset and then fits additional copies of the regressor on the same dataset but where the weights of instances are adjusted according to the error of the current prediction. 
As such, subsequent regressors focus more on difficult cases.","category":"page"},{"location":"models/AdaBoostRegressor_MLJScikitLearnInterface/","page":"AdaBoostRegressor","title":"AdaBoostRegressor","text":"This class implements the algorithm known as AdaBoost.R2.","category":"page"},{"location":"models/KMeans_MLJScikitLearnInterface/#KMeans_MLJScikitLearnInterface","page":"KMeans","title":"KMeans","text":"","category":"section"},{"location":"models/KMeans_MLJScikitLearnInterface/","page":"KMeans","title":"KMeans","text":"KMeans","category":"page"},{"location":"models/KMeans_MLJScikitLearnInterface/","page":"KMeans","title":"KMeans","text":"A model type for constructing a k means, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/KMeans_MLJScikitLearnInterface/","page":"KMeans","title":"KMeans","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/KMeans_MLJScikitLearnInterface/","page":"KMeans","title":"KMeans","text":"KMeans = @load KMeans pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/KMeans_MLJScikitLearnInterface/","page":"KMeans","title":"KMeans","text":"Do model = KMeans() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KMeans(n_clusters=...).","category":"page"},{"location":"models/KMeans_MLJScikitLearnInterface/","page":"KMeans","title":"KMeans","text":"K-Means algorithm: find K centroids corresponding to K clusters in the data.","category":"page"},{"location":"models/UnivariateStandardizer_MLJModels/#UnivariateStandardizer_MLJModels","page":"UnivariateStandardizer","title":"UnivariateStandardizer","text":"","category":"section"},{"location":"models/UnivariateStandardizer_MLJModels/","page":"UnivariateStandardizer","title":"UnivariateStandardizer","text":"UnivariateStandardizer()","category":"page"},{"location":"models/UnivariateStandardizer_MLJModels/","page":"UnivariateStandardizer","title":"UnivariateStandardizer","text":"Transformer type for standardizing (whitening) single variable data.","category":"page"},{"location":"models/UnivariateStandardizer_MLJModels/","page":"UnivariateStandardizer","title":"UnivariateStandardizer","text":"This model may be deprecated in the future. 
Consider using Standardizer, which handles both tabular and univariate data.","category":"page"},{"location":"models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/#OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface","page":"OrthogonalMatchingPursuitRegressor","title":"OrthogonalMatchingPursuitRegressor","text":"","category":"section"},{"location":"models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitRegressor","title":"OrthogonalMatchingPursuitRegressor","text":"OrthogonalMatchingPursuitRegressor","category":"page"},{"location":"models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitRegressor","title":"OrthogonalMatchingPursuitRegressor","text":"A model type for constructing a orthogonal matching pursuit regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitRegressor","title":"OrthogonalMatchingPursuitRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitRegressor","title":"OrthogonalMatchingPursuitRegressor","text":"OrthogonalMatchingPursuitRegressor = @load OrthogonalMatchingPursuitRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitRegressor","title":"OrthogonalMatchingPursuitRegressor","text":"Do model = OrthogonalMatchingPursuitRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OrthogonalMatchingPursuitRegressor(n_nonzero_coefs=...).","category":"page"},{"location":"models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"OrthogonalMatchingPursuitRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/OrthogonalMatchingPursuitRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitRegressor","title":"OrthogonalMatchingPursuitRegressor","text":"n_nonzero_coefs = nothing\ntol = nothing\nfit_intercept = true\nnormalize = false\nprecompute = auto","category":"page"},{"location":"learning_curves/#Learning-Curves","page":"Learning Curves","title":"Learning Curves","text":"","category":"section"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"A learning curve in MLJ is a plot of some performance estimate, as a function of some model hyperparameter. This can be useful when tuning a single model hyperparameter, or when deciding how many iterations are required for some iterative model. The learning_curve method does not actually generate a plot but generates the data needed to do so.","category":"page"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"To generate learning curves you can bind data to a model by instantiating a machine. 
You can choose to supply all available data, as performance estimates are computed using a resampling strategy, defaulting to Holdout(fraction_train=0.7).","category":"page"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"using MLJ\nX, y = @load_boston;\n\natom = (@load RidgeRegressor pkg=MLJLinearModels)()\nensemble = EnsembleModel(model=atom, n=1000)\nmach = machine(ensemble, X, y)\n\nr_lambda = range(ensemble, :(model.lambda), lower=1e-1, upper=100, scale=:log10)\ncurve = MLJ.learning_curve(mach;\n range=r_lambda,\n resampling=CV(nfolds=3),\n measure=l1)","category":"page"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"using Plots\nplot(curve.parameter_values,\n curve.measurements,\n xlab=curve.parameter_name,\n xscale=curve.parameter_scale,\n ylab = \"CV estimate of RMS error\")","category":"page"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"(Image: )","category":"page"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"If the range hyperparameter is the number of iterations in some iterative model, learning_curve will not restart the training from scratch for each new value, unless a non-holdout resampling strategy is specified (and provided the model implements an appropriate update method). To obtain multiple curves (that are distinct) you will need to pass the name of the model random number generator, rng_name, and specify the random number generators to be used using rngs=... (an integer automatically generates the number specified):","category":"page"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"atom.lambda= 7.3\nr_n = range(ensemble, :n, lower=1, upper=50)\ncurves = MLJ.learning_curve(mach;\n range=r_n,\n measure=l1,\n verbosity=0,\n rng_name=:rng,\n rngs=4)","category":"page"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"plot(curves.parameter_values,\n curves.measurements,\n xlab=curves.parameter_name,\n ylab=\"Holdout estimate of RMS error\")","category":"page"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"(Image: )","category":"page"},{"location":"learning_curves/#API-reference","page":"Learning Curves","title":"API reference","text":"","category":"section"},{"location":"learning_curves/","page":"Learning Curves","title":"Learning Curves","text":"MLJTuning.learning_curve","category":"page"},{"location":"learning_curves/#MLJTuning.learning_curve","page":"Learning Curves","title":"MLJTuning.learning_curve","text":"curve = learning_curve(mach; resolution=30,\n resampling=Holdout(),\n repeats=1,\n measure=default_measure(machine.model),\n rows=nothing,\n weights=nothing,\n operation=nothing,\n range=nothing,\n acceleration=default_resource(),\n acceleration_grid=CPU1(),\n rngs=nothing,\n rng_name=nothing)\n\nGiven a supervised machine mach, returns a named tuple of objects suitable for generating a plot of performance estimates, as a function of the single hyperparameter specified in range. The tuple curve has the following keys: :parameter_name, :parameter_scale, :parameter_values, :measurements.\n\nTo generate multiple curves for a model with a random number generator (RNG) as a hyperparameter, specify the name, rng_name, of the (possibly nested) RNG field, and a vector rngs of RNG's, one for each curve. 
Alternatively, set rngs to the number of curves desired, in which case RNG's are automatically generated. The individual curve computations can be distributed across multiple processes using acceleration=CPUProcesses() or acceleration=CPUThreads(). See the second example below for a demonstration.\n\nX, y = @load_boston;\natom = @load RidgeRegressor pkg=MultivariateStats\nensemble = EnsembleModel(atom=atom, n=1000)\nmach = machine(ensemble, X, y)\nr_lambda = range(ensemble, :(atom.lambda), lower=10, upper=500, scale=:log10)\ncurve = learning_curve(mach; range=r_lambda, resampling=CV(), measure=mav)\nusing Plots\nplot(curve.parameter_values,\n curve.measurements,\n xlab=curve.parameter_name,\n xscale=curve.parameter_scale,\n ylab = \"CV estimate of RMS error\")\n\nIf using a Holdout() resampling strategy (with no shuffling) and if the specified hyperparameter is the number of iterations in some iterative model (and that model has an appropriately overloaded MLJModelInterface.update method) then training is not restarted from scratch for each increment of the parameter, ie the model is trained progressively.\n\natom.lambda=200\nr_n = range(ensemble, :n, lower=1, upper=250)\ncurves = learning_curve(mach; range=r_n, verbosity=0, rng_name=:rng, rngs=3)\nplot!(curves.parameter_values,\n curves.measurements,\n xlab=curves.parameter_name,\n ylab=\"Holdout estimate of RMS error\")\n\n\n\nlearning_curve(model::Supervised, X, y; kwargs...)\nlearning_curve(model::Supervised, X, y, w; kwargs...)\n\nPlot a learning curve (or curves) directly, without first constructing a machine.\n\nSummary of key-word options\n\nresolution - number of points generated from range (number of model evaluations); default is 30\nacceleration - parallelization option for passing to evaluate!; an instance of CPU1, CPUProcesses or CPUThreads from ComputationalResources.jl; default is default_resource()\nacceleration_grid - parallelization option for distributing each performance evaluation\nrngs - for specifying random number generator(s) to be passed to the model (see above)\nrng_name - name of the model hyper-parameter representing a random number generator (see above); possibly nested\n\nOther key-word options are documented at TunedModel.\n\n\n\n\n\n","category":"function"},{"location":"models/EvoLinearRegressor_EvoLinear/#EvoLinearRegressor_EvoLinear","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"EvoLinearRegressor(; kwargs...)","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"A model type for constructing an EvoLinearRegressor, based on EvoLinear.jl, and implementing both an internal API and the MLJ model interface.","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/#Keyword-arguments","page":"EvoLinearRegressor","title":"Keyword arguments","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"loss=:mse: loss function to be minimised. Can be one of:\n:mse\n:logistic\n:poisson\n:gamma\n:tweedie\nnrounds=10: maximum number of training rounds.\neta=1: Learning rate. Typically in the range [1e-2, 1].\nL1=0: Regularization penalty applied by shrinking the weight update to 0 if the update is < L1. No penalty if the update is > L1. Results in sparse feature selection. 
Typically in the [0, 1] range on normalized features.\nL2=0: Regularization penalty applied to the square of the weight update value. Restricts large parameter values. Typically in the [0, 1] range on normalized features.\nrng=123: random seed. Not used at the moment.\nupdater=:all: training method. Only :all is supported at the moment. Gradients for each feature are computed simultaneously, then the bias is updated based on all feature updates.\ndevice=:cpu: Only :cpu is supported at the moment.","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/#Internal-API","page":"EvoLinearRegressor","title":"Internal API","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"Do config = EvoLinearRegressor() to construct a hyper-parameter struct with default hyper-parameters. Provide keyword arguments as listed above to override defaults, for example:","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"EvoLinearRegressor(loss=:logistic, L1=1e-3, L2=1e-2, nrounds=100)","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/#Training-model","page":"EvoLinearRegressor","title":"Training model","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"A model is built using fit:","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"config = EvoLinearRegressor()\nm = fit(config; x, y, w)","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/#Inference","page":"EvoLinearRegressor","title":"Inference","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"The fitted result is an EvoLinearModel, which acts as a prediction function when passed a feature matrix as argument. ","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"preds = m(x)","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/#MLJ-Interface","page":"EvoLinearRegressor","title":"MLJ Interface","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"From MLJ, the type can be imported using:","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"EvoLinearRegressor = @load EvoLinearRegressor pkg=EvoLinear","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"Do model = EvoLinearRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in EvoLinearRegressor(loss=...).","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/#Training-model-2","page":"EvoLinearRegressor","title":"Training model","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X, y) where: ","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/#Operations","page":"EvoLinearRegressor","title":"Operations","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"predict(mach, Xnew): return predictions of the target given","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"features Xnew having the same scitype as X above. Predictions are deterministic.","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/#Fitted-parameters","page":"EvoLinearRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":":fitresult: the EvoLinearModel object returned by the EvoLinear.jl fitting algorithm.","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/#Report","page":"EvoLinearRegressor","title":"Report","text":"","category":"section"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":"The fields of report(mach) are:","category":"page"},{"location":"models/EvoLinearRegressor_EvoLinear/","page":"EvoLinearRegressor","title":"EvoLinearRegressor","text":":coef: Vector of coefficients (βs) associated with each of the features.\n:bias: Value of the bias.\n:names: Names of each of the features.","category":"page"},{"location":"models/KernelPerceptronClassifier_BetaML/#KernelPerceptronClassifier_BetaML","page":"KernelPerceptronClassifier","title":"KernelPerceptronClassifier","text":"","category":"section"},{"location":"models/KernelPerceptronClassifier_BetaML/","page":"KernelPerceptronClassifier","title":"KernelPerceptronClassifier","text":"mutable struct KernelPerceptronClassifier <: MLJModelInterface.Probabilistic","category":"page"},{"location":"models/KernelPerceptronClassifier_BetaML/","page":"KernelPerceptronClassifier","title":"KernelPerceptronClassifier","text":"The kernel perceptron algorithm using one-vs-one for multiclass, from the Beta Machine Learning Toolkit 
(BetaML).","category":"page"},{"location":"models/KernelPerceptronClassifier_BetaML/#Hyperparameters:","page":"KernelPerceptronClassifier","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/KernelPerceptronClassifier_BetaML/","page":"KernelPerceptronClassifier","title":"KernelPerceptronClassifier","text":"kernel::Function: Kernel function to employ. See ?radial_kernel or ?polynomial_kernel (once the BetaML package is loaded) for details, or check ?BetaML.Utils to verify if other kernels are defined (you can always define your own kernel) [def: radial_kernel]\nepochs::Int64: Maximum number of epochs, i.e. passes through the whole training sample [def: 100]\ninitial_errors::Union{Nothing, Vector{Vector{Int64}}}: Initial distribution of the number of errors [def: nothing, i.e. zeros]. If provided, this should be a nModels-length vector of nRecords-length integer vectors, where nModels is computed as (n_classes * (n_classes - 1)) / 2\nshuffle::Bool: Whether to randomly shuffle the data at each iteration (epoch) [def: true]\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]","category":"page"},{"location":"models/KernelPerceptronClassifier_BetaML/#Example:","page":"KernelPerceptronClassifier","title":"Example:","text":"","category":"section"},{"location":"models/KernelPerceptronClassifier_BetaML/","page":"KernelPerceptronClassifier","title":"KernelPerceptronClassifier","text":"julia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load KernelPerceptronClassifier pkg = \"BetaML\"\n[ Info: For silent loading, specify `verbosity=0`. \nimport BetaML ✔\nBetaML.Perceptron.KernelPerceptronClassifier\n\njulia> model = modelType()\nKernelPerceptronClassifier(\n kernel = BetaML.Utils.radial_kernel, \n epochs = 100, \n initial_errors = nothing, \n shuffle = true, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n\njulia> est_classes = predict(mach, X)\n150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt8, Float64}:\n UnivariateFinite{Multiclass{3}}(setosa=>0.665, versicolor=>0.245, virginica=>0.09)\n UnivariateFinite{Multiclass{3}}(setosa=>0.665, versicolor=>0.245, virginica=>0.09)\n ⋮\n UnivariateFinite{Multiclass{3}}(setosa=>0.09, versicolor=>0.245, virginica=>0.665)\n UnivariateFinite{Multiclass{3}}(setosa=>0.09, versicolor=>0.665, virginica=>0.245)","category":"page"},{"location":"model_search/#model_search","page":"Model Search","title":"Model Search","text":"","category":"section"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"MLJ has a model registry, allowing the user to search models and their properties, without loading all the packages containing model code. In turn, this allows one to efficiently find all models solving a given machine learning task. The task itself is specified with the help of the matching method, and the search executed with the models methods, as detailed below. 
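As a minimal sketch of how these fit together (assuming a feature table X and a target vector y are already defined; the details, and further options, follow below):\n\nusing MLJ\nms = models(matching(X, y))   ## metadata entries for all models compatible with (X, y)\nTree = @load DecisionTreeClassifier pkg=DecisionTree   ## then load the code for a chosen model (illustration only)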
","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"For commonly encountered problems with model search, see also Preparing Data.","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"A table of all models is also given at List of Supported Models.","category":"page"},{"location":"model_search/#Model-metadata","page":"Model Search","title":"Model metadata","text":"","category":"section"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"Terminology. In this section the word \"model\" refers to a metadata entry in the model registry, as opposed to an actual model struct that such an entry represents. One can obtain such an entry with the info command:","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"using MLJ\nMLJ.color_off()","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"info(\"PCA\")","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"So a \"model\" in the present context is just a named tuple containing metadata, and not an actual model type or instance. If two models with the same name occur in different packages, the package name must be specified, as in info(\"LinearRegressor\", pkg=\"GLM\"). ","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"Model document strings can be retreived, without importing the defining code, using the doc function:","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"doc(\"DecisionTreeClassifier\", pkg=\"DecisionTree\")","category":"page"},{"location":"model_search/#General-model-queries","page":"Model Search","title":"General model queries","text":"","category":"section"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"We list all models (named tuples) using models(), and list the models for which code is already loaded with localmodels():","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"localmodels()\nlocalmodels()[2]","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"One can search for models containing specified strings or regular expressions in their docstring attributes, as in","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"models(\"forest\")","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"or by specifying a filter (Bool-valued function):","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"filter(model) = model.is_supervised &&\n model.input_scitype >: MLJ.Table(Continuous) &&\n model.target_scitype >: AbstractVector{<:Multiclass{3}} &&\n model.prediction_type == :deterministic\nmodels(filter)","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"Multiple test arguments may be passed to models, which are applied conjunctively.","category":"page"},{"location":"model_search/#Matching-models-to-data","page":"Model Search","title":"Matching models to data","text":"","category":"section"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"Common searches are streamlined with the help of the matching command, defined as 
follows:","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"matching(model, X, y) == true exactly when model is supervised and admits inputs and targets with the scientific types of X and y, respectively\nmatching(model, X) == true exactly when model is unsupervised and admits inputs with the scientific types of X.","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"So, to search for all supervised probabilistic models handling input X and target y, one can define the testing function task by","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"task(model) = matching(model, X, y) && model.prediction_type == :probabilistic","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"And execute the search with","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"models(task)","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"Also defined are Bool-valued callable objects matching(model), matching(X, y) and matching(X), with obvious behavior. For example, matching(X, y)(model) = matching(model, X, y).","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"So, to search for all models compatible with input X and target y, for example, one executes","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"models(matching(X, y))","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"while the preceding search can also be written","category":"page"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"models() do model\n matching(model, X, y) &&\n model.prediction_type == :probabilistic\nend","category":"page"},{"location":"model_search/#API","page":"Model Search","title":"API","text":"","category":"section"},{"location":"model_search/","page":"Model Search","title":"Model Search","text":"models\nlocalmodels","category":"page"},{"location":"model_search/#MLJModels.models","page":"Model Search","title":"MLJModels.models","text":"models()\n\nList all models in the MLJ registry. 
Here and below model means the registry metadata entry for a genuine model type (a proxy for types whose defining code may not be loaded).\n\nmodels(filters...)\n\nList all models m for which filter(m) is true, for each filter in filters.\n\nmodels(matching(X, y))\n\nList all supervised models compatible with training data X, y.\n\nmodels(matching(X))\n\nList all unsupervised models compatible with training data X.\n\nExcluded in the listings are the built-in model-wraps, like EnsembleModel, TunedModel, and IteratedModel.\n\nExample\n\nIf\n\ntask(model) = model.is_supervised && model.is_probabilistic\n\nthen models(task) lists all supervised models making probabilistic predictions.\n\nSee also: localmodels.\n\n\n\n\n\nmodels(needle::Union{AbstractString,Regex})\n\nList all models whose name or docstring matches a given needle.\n\n\n\n\n\n","category":"function"},{"location":"model_search/#MLJModels.localmodels","page":"Model Search","title":"MLJModels.localmodels","text":"localmodels(; modl=Main)\nlocalmodels(filters...; modl=Main)\nlocalmodels(needle::Union{AbstractString,Regex}; modl=Main)\n\nList all models currently available to the user from the module modl without importing a package, and which additionally pass through the specified filters. Here a filter is a Bool-valued function on models.\n\nUse load_path to get the path to some model returned, as in these examples:\n\nms = localmodels()\nmodel = ms[1]\nload_path(model)\n\nSee also models, load_path.\n\n\n\n\n\n","category":"function"},{"location":"models/HistGradientBoostingClassifier_MLJScikitLearnInterface/#HistGradientBoostingClassifier_MLJScikitLearnInterface","page":"HistGradientBoostingClassifier","title":"HistGradientBoostingClassifier","text":"","category":"section"},{"location":"models/HistGradientBoostingClassifier_MLJScikitLearnInterface/","page":"HistGradientBoostingClassifier","title":"HistGradientBoostingClassifier","text":"HistGradientBoostingClassifier","category":"page"},{"location":"models/HistGradientBoostingClassifier_MLJScikitLearnInterface/","page":"HistGradientBoostingClassifier","title":"HistGradientBoostingClassifier","text":"A model type for constructing a hist gradient boosting classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/HistGradientBoostingClassifier_MLJScikitLearnInterface/","page":"HistGradientBoostingClassifier","title":"HistGradientBoostingClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/HistGradientBoostingClassifier_MLJScikitLearnInterface/","page":"HistGradientBoostingClassifier","title":"HistGradientBoostingClassifier","text":"HistGradientBoostingClassifier = @load HistGradientBoostingClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/HistGradientBoostingClassifier_MLJScikitLearnInterface/","page":"HistGradientBoostingClassifier","title":"HistGradientBoostingClassifier","text":"Do model = HistGradientBoostingClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in HistGradientBoostingClassifier(loss=...).","category":"page"},{"location":"models/HistGradientBoostingClassifier_MLJScikitLearnInterface/","page":"HistGradientBoostingClassifier","title":"HistGradientBoostingClassifier","text":"This algorithm builds an additive model in a forward stage-wise fashion; it allows for the optimization of arbitrary differentiable loss functions. 
In each stage n_classes_ regression trees are fit on the negative gradient of the loss function, e.g. binary or multiclass log loss. Binary classification is a special case where only a single regression tree is induced.","category":"page"},{"location":"models/HistGradientBoostingClassifier_MLJScikitLearnInterface/","page":"HistGradientBoostingClassifier","title":"HistGradientBoostingClassifier","text":"HistGradientBoostingClassifier is a much faster variant of this algorithm for intermediate datasets (n_samples >= 10_000).","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/#LinearBinaryClassifier_GLM","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"","category":"section"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"LinearBinaryClassifier","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"A model type for constructing a linear binary classifier, based on GLM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"LinearBinaryClassifier = @load LinearBinaryClassifier pkg=GLM","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"Do model = LinearBinaryClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearBinaryClassifier(fit_intercept=...).","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"LinearBinaryClassifier is a generalized linear model, specialised to the case of a binary target variable, with a user-specified link function. 
Options exist to specify an intercept or offset feature.","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/#Training-data","page":"LinearBinaryClassifier","title":"Training data","text":"","category":"section"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"In MLJ or MLJBase, bind an instance model to data with one of:","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"mach = machine(model, X, y)\nmach = machine(model, X, y, w)","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"Here","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"X: is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the scitype with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor(2) or <:Multiclass(2); check the scitype with schema(y)\nw: is a vector of Real per-observation weights","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/#Hyper-parameters","page":"LinearBinaryClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"fit_intercept=true: Whether to calculate the intercept for this model. If set to false, no intercept will be calculated (e.g. the data is expected to be centered)\nlink=GLM.LogitLink: The function which links the linear prediction function to the probability of a particular outcome or class. This must have type GLM.Link01. Options include GLM.LogitLink(), GLM.ProbitLink(), GLM.CloglogLink(), and GLM.CauchitLink().\noffsetcol=nothing: Name of the column to be used as an offset, if any. An offset is a variable which is known to have a coefficient of 1.\nmaxiter::Integer=30: The maximum number of iterations allowed to achieve convergence.\natol::Real=1e-6: Absolute threshold for convergence. Convergence is achieved when the relative change in deviance is less than max(rtol*dev, atol). This term exists to avoid failure when deviance is unchanged except for rounding errors.\nrtol::Real=1e-6: Relative threshold for convergence. Convergence is achieved when the relative change in deviance is less than max(rtol*dev, atol). This term exists to avoid failure when deviance is unchanged except for rounding errors.\nminstepfac::Real=0.001: Minimum step fraction. Must be between 0 and 1. Lower bound for the factor used to update the linear fit.\nreport_keys: Vector of keys for the report. Possible keys are: :deviance, :dof_residual, :stderror, :vcov, :coef_table and :glm_model. By default only :glm_model is excluded.","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/#Operations","page":"LinearBinaryClassifier","title":"Operations","text":"","category":"section"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"predict(mach, Xnew): Return predictions of the target given features Xnew having the same scitype as X above. 
Predictions are probabilistic.\npredict_mode(mach, Xnew): Return the modes of the probabilistic predictions returned above.","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/#Fitted-parameters","page":"LinearBinaryClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"features: The names of the features used during model fitting.\ncoef: The linear coefficients determined by the model.\nintercept: The intercept determined by the model.","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/#Report","page":"LinearBinaryClassifier","title":"Report","text":"","category":"section"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"The fields of report(mach) are:","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"deviance: Measure of deviance of fitted model with respect to a perfectly fitted model. For a linear model, this is the weighted residual sum of squares\ndof_residual: The degrees of freedom for residuals, when meaningful.\nstderror: The standard errors of the coefficients.\nvcov: The estimated variance-covariance matrix of the coefficient estimates.\ncoef_table: Table which displays coefficients and summarizes their significance and confidence intervals.\nglm_model: The raw fitted model returned by GLM.lm. Note this points to training data. Refer to the GLM.jl documentation for usage.","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/#Examples","page":"LinearBinaryClassifier","title":"Examples","text":"","category":"section"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"using MLJ\nimport GLM ## namespace must be available\n\nLinearBinaryClassifier = @load LinearBinaryClassifier pkg=GLM\nclf = LinearBinaryClassifier(fit_intercept=false, link=GLM.ProbitLink())\n\nX, y = @load_crabs\n\nmach = machine(clf, X, y) |> fit!\n\nXnew = (;FL = [8.1, 24.8, 7.2],\n RW = [5.1, 25.7, 6.4],\n CL = [15.9, 46.7, 14.3],\n CW = [18.7, 59.7, 12.2],\n BD = [6.2, 23.6, 8.4],)\n\nyhat = predict(mach, Xnew) ## probabilistic predictions\npdf(yhat, levels(y)) ## probability matrix\np_B = pdf.(yhat, \"B\")\nclass_labels = predict_mode(mach, Xnew)\n\nfitted_params(mach).features\nfitted_params(mach).coef\nfitted_params(mach).intercept\n\nreport(mach)","category":"page"},{"location":"models/LinearBinaryClassifier_GLM/","page":"LinearBinaryClassifier","title":"LinearBinaryClassifier","text":"See also LinearRegressor, LinearCountRegressor","category":"page"},{"location":"models/SOSDetector_OutlierDetectionPython/#SOSDetector_OutlierDetectionPython","page":"SOSDetector","title":"SOSDetector","text":"","category":"section"},{"location":"models/SOSDetector_OutlierDetectionPython/","page":"SOSDetector","title":"SOSDetector","text":"SOSDetector(perplexity = 4.5,\n metric = \"minkowski\",\n eps = 
1e-5)","category":"page"},{"location":"models/SOSDetector_OutlierDetectionPython/","page":"SOSDetector","title":"SOSDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.sos","category":"page"},{"location":"models/BayesianQDA_MLJScikitLearnInterface/#BayesianQDA_MLJScikitLearnInterface","page":"BayesianQDA","title":"BayesianQDA","text":"","category":"section"},{"location":"models/BayesianQDA_MLJScikitLearnInterface/","page":"BayesianQDA","title":"BayesianQDA","text":"BayesianQDA","category":"page"},{"location":"models/BayesianQDA_MLJScikitLearnInterface/","page":"BayesianQDA","title":"BayesianQDA","text":"A model type for constructing a Bayesian quadratic discriminant analysis, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BayesianQDA_MLJScikitLearnInterface/","page":"BayesianQDA","title":"BayesianQDA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BayesianQDA_MLJScikitLearnInterface/","page":"BayesianQDA","title":"BayesianQDA","text":"BayesianQDA = @load BayesianQDA pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/BayesianQDA_MLJScikitLearnInterface/","page":"BayesianQDA","title":"BayesianQDA","text":"Do model = BayesianQDA() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BayesianQDA(priors=...).","category":"page"},{"location":"models/BayesianQDA_MLJScikitLearnInterface/#Hyper-parameters","page":"BayesianQDA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/BayesianQDA_MLJScikitLearnInterface/","page":"BayesianQDA","title":"BayesianQDA","text":"priors = nothing\nreg_param = 0.0\nstore_covariance = false\ntol = 0.0001","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/#XGBoostClassifier_XGBoost","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"","category":"section"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"XGBoostClassifier","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"A model type for constructing a eXtreme Gradient Boosting Classifier, based on XGBoost.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"XGBoostClassifier = @load XGBoostClassifier pkg=XGBoost","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"Do model = XGBoostClassifier() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in XGBoostClassifier(test=...).","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"Univariate classification using xgboost.","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/#Training-data","page":"XGBoostClassifier","title":"Training data","text":"","category":"section"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"m = machine(model, X, y)","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"where","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"X: any table of input features, either an AbstractMatrix or Tables.jl-compatible table.\ny: is an AbstractVector Finite target.","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"Train using fit!(m, rows=...).","category":"page"},{"location":"models/XGBoostClassifier_XGBoost/#Hyper-parameters","page":"XGBoostClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/XGBoostClassifier_XGBoost/","page":"XGBoostClassifier","title":"XGBoostClassifier","text":"See https://xgboost.readthedocs.io/en/stable/parameter.html.","category":"page"},{"location":"models/LODADetector_OutlierDetectionPython/#LODADetector_OutlierDetectionPython","page":"LODADetector","title":"LODADetector","text":"","category":"section"},{"location":"models/LODADetector_OutlierDetectionPython/","page":"LODADetector","title":"LODADetector","text":"LODADetector(n_bins = 10,\n n_random_cuts = 100)","category":"page"},{"location":"models/LODADetector_OutlierDetectionPython/","page":"LODADetector","title":"LODADetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.loda","category":"page"},{"location":"models/RandomOversampler_Imbalance/#RandomOversampler_Imbalance","page":"RandomOversampler","title":"RandomOversampler","text":"","category":"section"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"Initiate a random oversampling model with the given hyper-parameters.","category":"page"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"RandomOversampler","category":"page"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"A model type for constructing a random oversampler, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"RandomOversampler = @load RandomOversampler pkg=Imbalance","category":"page"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"Do model = RandomOversampler() to construct an instance with 
default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomOversampler(ratios=...).","category":"page"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"RandomOversampler implements naive oversampling by repeating existing observations with replacement.","category":"page"},{"location":"models/RandomOversampler_Imbalance/#Training-data","page":"RandomOversampler","title":"Training data","text":"","category":"section"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"In MLJ or MLJBase, wrap the model in a machine by mach = machine(model)","category":"page"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"Likewise, there is no need to fit!(mach). ","category":"page"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"For default values of the hyper-parameters, model can be constructed by model = RandomOversampler()","category":"page"},{"location":"models/RandomOversampler_Imbalance/#Hyperparameters","page":"RandomOversampler","title":"Hyperparameters","text":"","category":"section"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"ratios=1.0: A parameter that controls the amount of oversampling to be done for each class\nCan be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class\nCan be a dictionary mapping each class label to the float ratio for that class\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister.","category":"page"},{"location":"models/RandomOversampler_Imbalance/#Transform-Inputs","page":"RandomOversampler","title":"Transform Inputs","text":"","category":"section"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"X: A matrix of real numbers or a table with element scitypes that subtype Union{Finite, Infinite}. Elements in nominal columns should subtype Finite (i.e., have scitype OrderedFactor or Multiclass) and elements in continuous columns should subtype Infinite (i.e., have scitype Count or Continuous).\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/RandomOversampler_Imbalance/#Transform-Outputs","page":"RandomOversampler","title":"Transform Outputs","text":"","category":"section"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"Xover: A matrix or table that includes the original data and the new observations due to oversampling, 
depending on whether the input X is a matrix or table respectively\nyover: An abstract vector of labels corresponding to Xover","category":"page"},{"location":"models/RandomOversampler_Imbalance/#Operations","page":"RandomOversampler","title":"Operations","text":"","category":"section"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"transform(mach, X, y): resample the data X and y using RandomOversampler, returning both the new and original observations","category":"page"},{"location":"models/RandomOversampler_Imbalance/#Example","page":"RandomOversampler","title":"Example","text":"","category":"section"},{"location":"models/RandomOversampler_Imbalance/","page":"RandomOversampler","title":"RandomOversampler","text":"using MLJ\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows, num_continuous_feats = 100, 5\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n class_probs, rng=42) \n\njulia> Imbalance.checkbalance(y)\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 19 (39.6%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 33 (68.8%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) \n\n## load RandomOversampler\nRandomOversampler = @load RandomOversampler pkg=Imbalance\n\n## wrap the model in a machine\noversampler = RandomOversampler(ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)\nmach = machine(oversampler)\n\n## provide the data to transform (there is nothing to fit)\nXover, yover = transform(mach, X, y)\n\njulia> Imbalance.checkbalance(yover)\n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 38 (79.2%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 43 (89.6%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 48 (100.0%) ","category":"page"},{"location":"models/DummyRegressor_MLJScikitLearnInterface/#DummyRegressor_MLJScikitLearnInterface","page":"DummyRegressor","title":"DummyRegressor","text":"","category":"section"},{"location":"models/DummyRegressor_MLJScikitLearnInterface/","page":"DummyRegressor","title":"DummyRegressor","text":"DummyRegressor","category":"page"},{"location":"models/DummyRegressor_MLJScikitLearnInterface/","page":"DummyRegressor","title":"DummyRegressor","text":"A model type for constructing a dummy regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/DummyRegressor_MLJScikitLearnInterface/","page":"DummyRegressor","title":"DummyRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/DummyRegressor_MLJScikitLearnInterface/","page":"DummyRegressor","title":"DummyRegressor","text":"DummyRegressor = @load DummyRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/DummyRegressor_MLJScikitLearnInterface/","page":"DummyRegressor","title":"DummyRegressor","text":"Do model = DummyRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in DummyRegressor(strategy=...).","category":"page"},{"location":"models/DummyRegressor_MLJScikitLearnInterface/","page":"DummyRegressor","title":"DummyRegressor","text":"DummyRegressor is a regressor that makes predictions using simple rules.","category":"page"},{"location":"models/PegasosClassifier_BetaML/#PegasosClassifier_BetaML","page":"PegasosClassifier","title":"PegasosClassifier","text":"","category":"section"},{"location":"models/PegasosClassifier_BetaML/","page":"PegasosClassifier","title":"PegasosClassifier","text":"mutable struct PegasosClassifier <: MLJModelInterface.Probabilistic","category":"page"},{"location":"models/PegasosClassifier_BetaML/","page":"PegasosClassifier","title":"PegasosClassifier","text":"The gradient-based linear \"pegasos\" classifier using one-vs-all for multiclass, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/PegasosClassifier_BetaML/#Hyperparameters:","page":"PegasosClassifier","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/PegasosClassifier_BetaML/","page":"PegasosClassifier","title":"PegasosClassifier","text":"initial_coefficients::Union{Nothing, Matrix{Float64}}: N-classes by D-dimensions matrix of initial linear coefficients [def: nothing, i.e. zeros]\ninitial_constant::Union{Nothing, Vector{Float64}}: N-classes vector of initial constant terms [def: nothing, i.e. zeros]\nlearning_rate::Function: Learning rate [def: (epoch -> 1/sqrt(epoch))]\nlearning_rate_multiplicative::Float64: Multiplicative term of the learning rate [def: 0.5]\nepochs::Int64: Maximum number of epochs, i.e. passes through the whole training sample [def: 1000]\nshuffle::Bool: Whether to randomly shuffle the data at each iteration (epoch) [def: true]\nforce_origin::Bool: Whether to force the parameter associated with the constant term to remain zero [def: false]\nreturn_mean_hyperplane::Bool: Whether to return the average hyperplane coefficients instead of the final ones [def: false]\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]","category":"page"},{"location":"models/PegasosClassifier_BetaML/#Example:","page":"PegasosClassifier","title":"Example:","text":"","category":"section"},{"location":"models/PegasosClassifier_BetaML/","page":"PegasosClassifier","title":"PegasosClassifier","text":"julia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load PegasosClassifier pkg = \"BetaML\" verbosity=0\nBetaML.Perceptron.PegasosClassifier\n\njulia> model = modelType()\nPegasosClassifier(\n initial_coefficients = nothing, \n initial_constant = nothing, \n learning_rate = BetaML.Perceptron.var\"#71#73\"(), \n learning_rate_multiplicative = 0.5, \n epochs = 1000, \n shuffle = true, \n force_origin = false, \n return_mean_hyperplane = false, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n\njulia> est_classes = predict(mach, X)\n150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt8, Float64}:\n UnivariateFinite{Multiclass{3}}(setosa=>0.817, versicolor=>0.153, virginica=>0.0301)\n UnivariateFinite{Multiclass{3}}(setosa=>0.791, versicolor=>0.177, virginica=>0.0318)\n ⋮\n UnivariateFinite{Multiclass{3}}(setosa=>0.254, versicolor=>0.5, virginica=>0.246)\n UnivariateFinite{Multiclass{3}}(setosa=>0.283, versicolor=>0.51, 
virginica=>0.207)","category":"page"},{"location":"models/TheilSenRegressor_MLJScikitLearnInterface/#TheilSenRegressor_MLJScikitLearnInterface","page":"TheilSenRegressor","title":"TheilSenRegressor","text":"","category":"section"},{"location":"models/TheilSenRegressor_MLJScikitLearnInterface/","page":"TheilSenRegressor","title":"TheilSenRegressor","text":"TheilSenRegressor","category":"page"},{"location":"models/TheilSenRegressor_MLJScikitLearnInterface/","page":"TheilSenRegressor","title":"TheilSenRegressor","text":"A model type for constructing a Theil-Sen regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/TheilSenRegressor_MLJScikitLearnInterface/","page":"TheilSenRegressor","title":"TheilSenRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/TheilSenRegressor_MLJScikitLearnInterface/","page":"TheilSenRegressor","title":"TheilSenRegressor","text":"TheilSenRegressor = @load TheilSenRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/TheilSenRegressor_MLJScikitLearnInterface/","page":"TheilSenRegressor","title":"TheilSenRegressor","text":"Do model = TheilSenRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in TheilSenRegressor(fit_intercept=...).","category":"page"},{"location":"models/TheilSenRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"TheilSenRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/TheilSenRegressor_MLJScikitLearnInterface/","page":"TheilSenRegressor","title":"TheilSenRegressor","text":"fit_intercept = true\ncopy_X = true\nmax_subpopulation = 10000\nn_subsamples = nothing\nmax_iter = 300\ntol = 0.001\nrandom_state = nothing\nn_jobs = nothing\nverbose = false","category":"page"},{"location":"models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/#MultiTaskLassoCVRegressor_MLJScikitLearnInterface","page":"MultiTaskLassoCVRegressor","title":"MultiTaskLassoCVRegressor","text":"","category":"section"},{"location":"models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoCVRegressor","title":"MultiTaskLassoCVRegressor","text":"MultiTaskLassoCVRegressor","category":"page"},{"location":"models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoCVRegressor","title":"MultiTaskLassoCVRegressor","text":"A model type for constructing a multi-target lasso regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoCVRegressor","title":"MultiTaskLassoCVRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoCVRegressor","title":"MultiTaskLassoCVRegressor","text":"MultiTaskLassoCVRegressor = @load MultiTaskLassoCVRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoCVRegressor","title":"MultiTaskLassoCVRegressor","text":"Do model = MultiTaskLassoCVRegressor() to construct an instance with default hyper-parameters. 
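As a hedged sketch only (the synthetic two-column target below and the choice cv=3 are illustrative assumptions; in MLJ a multi-target regressor is bound to a table of Continuous target columns):\n\nusing MLJ\nMultiTaskLassoCVRegressor = @load MultiTaskLassoCVRegressor pkg=MLJScikitLearnInterface\nmodel = MultiTaskLassoCVRegressor(cv=3)   ## 3-fold internal cross-validation over the regularization path\nX = (x1=rand(100), x2=rand(100), x3=rand(100))\ny = (y1=X.x1 .+ 0.05*rand(100), y2=X.x2 .- X.x3 .+ 0.05*rand(100))   ## two Continuous targets, supplied as a table\nmach = machine(model, X, y) |> fit!\npredict(mach, X)\n\n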
Provide keyword arguments to override hyper-parameter defaults, as in MultiTaskLassoCVRegressor(eps=...).","category":"page"},{"location":"models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"MultiTaskLassoCVRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MultiTaskLassoCVRegressor_MLJScikitLearnInterface/","page":"MultiTaskLassoCVRegressor","title":"MultiTaskLassoCVRegressor","text":"eps = 0.001\nn_alphas = 100\nalphas = nothing\nfit_intercept = true\nmax_iter = 300\ntol = 0.0001\ncopy_X = true\ncv = 5\nverbose = false\nn_jobs = 1\nrandom_state = nothing\nselection = cyclic","category":"page"},{"location":"evaluating_model_performance/#Evaluating-Model-Performance","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"","category":"section"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"MLJ allows quick evaluation of a supervised model's performance against a battery of selected losses or scores. For more on available performance measures, see Performance Measures.","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"In addition to hold-out and cross-validation, the user can specify an explicit list of train/test pairs of row indices for resampling, or define new resampling strategies.","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"For simultaneously evaluating multiple models, see Comparing models of different type and nested cross-validation.","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"For externally logging the outcomes of performance evaluation experiments, see Logging Workflows","category":"page"},{"location":"evaluating_model_performance/#Evaluating-against-a-single-measure","page":"Evaluating Model Performance","title":"Evaluating against a single measure","text":"","category":"section"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"using MLJ\nMLJ.color_off()","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"using MLJ\nX = (a=rand(12), b=rand(12), c=rand(12));\ny = X.a + 2X.b + 0.05*rand(12);\nmodel = (@load RidgeRegressor pkg=MultivariateStats verbosity=0)()\ncv=CV(nfolds=3)\nevaluate(model, X, y, resampling=cv, measure=l2, verbosity=0)","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"Alternatively, instead of applying evaluate to a model + data, one may call evaluate! on an existing machine wrapping the model in data:","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"mach = machine(model, X, y)\nevaluate!(mach, resampling=cv, measure=l2, verbosity=0)","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"(The latter call is a mutating call as the learned parameters stored in the machine potentially change. 
)","category":"page"},{"location":"evaluating_model_performance/#Multiple-measures","page":"Evaluating Model Performance","title":"Multiple measures","text":"","category":"section"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"Multiple measures are specified as a vector:","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"evaluate!(\n mach,\n resampling=cv,\n measures=[l1, rms, rmslp1], \n\tverbosity=0,\n)","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"Custom measures can also be provided.","category":"page"},{"location":"evaluating_model_performance/#Specifying-weights","page":"Evaluating Model Performance","title":"Specifying weights","text":"","category":"section"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"Per-observation weights can be passed to measures. If a measure does not support weights, the weights are ignored:","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"holdout = Holdout(fraction_train=0.8)\nweights = [1, 1, 2, 1, 1, 2, 3, 1, 1, 2, 3, 1];\nevaluate!(\n mach,\n resampling=CV(nfolds=3),\n measure=[l2, rsquared],\n weights=weights, \n)","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"In classification problems, use class_weights=... to specify a class weight dictionary.","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"MLJBase.evaluate!\nMLJBase.evaluate\nMLJBase.PerformanceEvaluation","category":"page"},{"location":"evaluating_model_performance/#MLJBase.evaluate!","page":"Evaluating Model Performance","title":"MLJBase.evaluate!","text":"evaluate!(mach; resampling=CV(), measure=nothing, options...)\n\nEstimate the performance of a machine mach wrapping a supervised model in data, using the specified resampling strategy (defaulting to 6-fold cross-validation) and measure, which can be a single measure or vector. Returns a PerformanceEvaluation object.\n\nAvailable resampling strategies are CV, Holdout, StratifiedCV and TimeSeriesCV. If resampling is not an instance of one of these, then a vector of tuples of the form (train_rows, test_rows) is expected. For example, setting\n\nresampling = [((1:100), (101:200)),\n ((101:200), (1:100))]\n\ngives two-fold cross-validation using the first 200 rows of data.\n\nAny measure conforming to the StatisticalMeasuresBase.jl API can be provided, assuming it can consume multiple observations.\n\nAlthough evaluate! is mutating, mach.model and mach.args are not mutated.\n\nAdditional keyword options\n\nrows - vector of observation indices from which both train and test folds are constructed (default is all observations)\noperation/operations=nothing - One of predict, predict_mean, predict_mode, predict_median, or predict_joint, or a vector of these of the same length as measure/measures. Automatically inferred if left unspecified. 
For example, predict_mode will be used for a Multiclass target, if model is a probabilistic predictor, but measure expects literal (point) target predictions. Operations actually applied can be inspected from the operation field of the object returned.\nweights - per-sample Real weights for measures that support them (not to be confused with weights used in training, such as the w in mach = machine(model, X, y, w)).\nclass_weights - dictionary of Real per-class weights for use with measures that support these, in classification problems (not to be confused with weights used in training, such as the w in mach = machine(model, X, y, w)).\nrepeats::Int=1: set to a higher value for repeated (Monte Carlo) resampling. For example, if repeats = 10, then resampling = CV(nfolds=5, shuffle=true) generates a total of 50 (train, test) pairs for evaluation and subsequent aggregation.\nacceleration=CPU1(): acceleration/parallelization option; can be any instance of CPU1 (single-threaded computation), CPUThreads (multi-threaded computation) or CPUProcesses (multi-process computation); default is default_resource(). These types are owned by ComputationalResources.jl.\nforce=false: set to true to force cold-restart of each training event\nverbosity::Int=1: logging level; can be negative\ncheck_measure=true: whether to screen measures for possible incompatibility with the model. Will not catch all incompatibilities.\nper_observation=true: whether to calculate estimates for individual observations; if false the per_observation field of the returned object is populated with missings. Setting to false may reduce compute time and allocations.\nlogger - a logger object (see MLJBase.log_evaluation)\n\nSee also evaluate, PerformanceEvaluation\n\n\n\n\n\n","category":"function"},{"location":"evaluating_model_performance/#MLJModelInterface.evaluate","page":"Evaluating Model Performance","title":"MLJModelInterface.evaluate","text":"some meta-models may choose to implement the evaluate operations\n\n\n\n\n\n","category":"function"},{"location":"evaluating_model_performance/#MLJBase.PerformanceEvaluation","page":"Evaluating Model Performance","title":"MLJBase.PerformanceEvaluation","text":"PerformanceEvaluation\n\nType of object returned by evaluate (for models plus data) or evaluate! (for machines). Such objects encode estimates of the performance (generalization error) of a supervised model or outlier detection model.\n\nWhen evaluate/evaluate! is called, a number of train/test pairs (\"folds\") of row indices are generated, according to the options provided, which are discussed in the evaluate! doc-string. Rows correspond to observations. The generated train/test pairs are recorded in the train_test_rows field of the PerformanceEvaluation struct, and the corresponding estimates, aggregated over all train/test pairs, are recorded in measurement, a vector with one entry for each measure (metric) recorded in measure.\n\nWhen displayed, a PerformanceEvaluation object includes a value under the heading 1.96*SE, derived from the standard error of the per_fold entries. This value is suitable for constructing a formal 95% confidence interval for the given measurement. Such intervals should be interpreted with caution. See, for example, Bates et al. (2021).\n\nFields\n\nThese fields are part of the public API of the PerformanceEvaluation struct.\n\nmodel: model used to create the performance evaluation. In the case of a tuning model, this is the best model found.\nmeasure: vector of measures (metrics) used to evaluate performance\nmeasurement: vector of measurements - one for each element of measure - aggregating the performance measurements over all train/test pairs (folds). The aggregation method applied for a given measure m is StatisticalMeasuresBase.external_aggregation_mode(m) (commonly Mean() or Sum())\noperation (e.g., predict_mode): the operations applied for each measure to generate predictions to be evaluated. Possibilities are: predict, predict_mean, predict_mode, predict_median, or predict_joint.\nper_fold: a vector of vectors of individual test fold evaluations (one vector per measure). Useful for obtaining a rough estimate of the variance of the performance estimate.\nper_observation: a vector of vectors of vectors containing individual per-observation measurements: for an evaluation e, e.per_observation[m][f][i] is the measurement for the ith observation in the fth test fold, evaluated using the mth measure. Useful for some forms of hyper-parameter optimization. Note that an aggregated measurement for some measure measure is repeated across all observations in a fold if StatisticalMeasures.can_report_unaggregated(measure) == true. If e has been computed with the per_observation=false option, then e.per_observation is a vector of missings.\nfitted_params_per_fold: a vector containing fitted_params(mach) for each machine mach trained during resampling - one machine per train/test pair. Use this to extract the learned parameters for each individual training event.\nreport_per_fold: a vector containing report(mach) for each machine mach trained during resampling - one machine per train/test pair.\ntrain_test_rows: a vector of tuples, each of the form (train, test), where train and test are vectors of row (observation) indices for training and evaluation respectively.\nresampling: the resampling strategy used to generate the train/test pairs.\nrepeats: the number of times the resampling strategy was repeated.\n\n\n\n\n\n","category":"type"},{"location":"evaluating_model_performance/#User-specified-train/test-sets","page":"Evaluating Model Performance","title":"User-specified train/test sets","text":"","category":"section"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"Users can either provide an explicit list of train/test pairs of row indices for resampling, as in this example:","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"fold1 = 1:6; fold2 = 7:12;\nevaluate!(\n mach,\n resampling = [(fold1, fold2), (fold2, fold1)],\n measures=[l1, l2], \n\tverbosity=0,\n)","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"Or the user can define their own re-usable ResamplingStrategy objects; see Custom resampling strategies below.","category":"page"},{"location":"evaluating_model_performance/#Built-in-resampling-strategies","page":"Evaluating Model Performance","title":"Built-in resampling strategies","text":"","category":"section"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"MLJBase.Holdout","category":"page"},{"location":"evaluating_model_performance/#MLJBase.Holdout","page":"Evaluating Model Performance","title":"MLJBase.Holdout","text":"holdout = Holdout(; fraction_train=0.7,\n shuffle=nothing,\n rng=nothing)\n\nHoldout resampling strategy, for use in evaluate!, evaluate and in tuning.\n\ntrain_test_pairs(holdout, rows)\n\nReturns the pair [(train, test)], where train and test are vectors such that rows=vcat(train, test) and length(train)/length(rows) is approximately equal to fraction_train.\n\nPre-shuffling of rows is controlled by rng and shuffle. If rng is an integer, then the Holdout keyword constructor resets it to MersenneTwister(rng). Otherwise some AbstractRNG object is expected.\n\nIf rng is left unspecified, rng is reset to Random.GLOBAL_RNG, in which case rows are only pre-shuffled if shuffle=true is specified.\n\n\n\n\n\n","category":"type"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"MLJBase.CV","category":"page"},{"location":"evaluating_model_performance/#MLJBase.CV","page":"Evaluating Model Performance","title":"MLJBase.CV","text":"cv = CV(; nfolds=6, shuffle=nothing, rng=nothing)\n\nCross-validation resampling strategy, for use in evaluate!, evaluate and tuning.\n\ntrain_test_pairs(cv, rows)\n\nReturns an nfolds-length iterator of (train, test) pairs of vectors (row indices), where each train and test is a sub-vector of rows. The test vectors are mutually exclusive and exhaust rows. Each train vector is the complement of the corresponding test vector. With no row pre-shuffling, the order of rows is preserved, in the sense that rows coincides precisely with the concatenation of the test vectors, in the order they are generated. The first r test vectors have length n + 1, where n, r = divrem(length(rows), nfolds), and the remaining test vectors have length n.\n\nPre-shuffling of rows is controlled by rng and shuffle. If rng is an integer, then the CV keyword constructor resets it to MersenneTwister(rng). Otherwise some AbstractRNG object is expected.\n\nIf rng is left unspecified, rng is reset to Random.GLOBAL_RNG, in which case rows are only pre-shuffled if shuffle=true is explicitly specified.\n\n\n\n\n\n","category":"type"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"MLJBase.StratifiedCV","category":"page"},{"location":"evaluating_model_performance/#MLJBase.StratifiedCV","page":"Evaluating Model Performance","title":"MLJBase.StratifiedCV","text":"stratified_cv = StratifiedCV(; nfolds=6,\n shuffle=false,\n rng=Random.GLOBAL_RNG)\n\nStratified cross-validation resampling strategy, for use in evaluate!, evaluate and in tuning. Applies only to classification problems (OrderedFactor or Multiclass targets).\n\ntrain_test_pairs(stratified_cv, rows, y)\n\nReturns an nfolds-length iterator of (train, test) pairs of vectors (row indices) where each train and test is a sub-vector of rows. The test vectors are mutually exclusive and exhaust rows. Each train vector is the complement of the corresponding test vector.\n\nUnlike regular cross-validation, the distribution of the levels of the target y corresponding to each train and test is constrained, as far as possible, to replicate that of y[rows] as a whole.\n\nThe stratified train_test_pairs algorithm is invariant to label renaming. For example, if you run replace!(y, 'a' => 'b', 'b' => 'a') and then re-run train_test_pairs, the returned (train, test) pairs will be the same.\n\nPre-shuffling of rows is controlled by rng and shuffle. 
If rng is an integer, then the StratifedCV keywod constructor resets it to MersenneTwister(rng). Otherwise some AbstractRNG object is expected.\n\nIf rng is left unspecified, rng is reset to Random.GLOBAL_RNG, in which case rows are only pre-shuffled if shuffle=true is explicitly specified.\n\n\n\n\n\n","category":"type"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"MLJBase.TimeSeriesCV","category":"page"},{"location":"evaluating_model_performance/#MLJBase.TimeSeriesCV","page":"Evaluating Model Performance","title":"MLJBase.TimeSeriesCV","text":"tscv = TimeSeriesCV(; nfolds=4)\n\nCross-validation resampling strategy, for use in evaluate!, evaluate and tuning, when observations are chronological and not expected to be independent.\n\ntrain_test_pairs(tscv, rows)\n\nReturns an nfolds-length iterator of (train, test) pairs of vectors (row indices), where each train and test is a sub-vector of rows. The rows are partitioned sequentially into nfolds + 1 approximately equal length partitions, where the first partition is the first train set, and the second partition is the first test set. The second train set consists of the first two partitions, and the second test set consists of the third partition, and so on for each fold.\n\nThe first partition (which is the first train set) has length n + r, where n, r = divrem(length(rows), nfolds + 1), and the remaining partitions (all of the test folds) have length n.\n\nExamples\n\njulia> MLJBase.train_test_pairs(TimeSeriesCV(nfolds=3), 1:10)\n3-element Vector{Tuple{UnitRange{Int64}, UnitRange{Int64}}}:\n (1:4, 5:6)\n (1:6, 7:8)\n (1:8, 9:10)\n\njulia> model = (@load RidgeRegressor pkg=MultivariateStats verbosity=0)();\n\njulia> data = @load_sunspots;\n\njulia> X = (lag1 = data.sunspot_number[2:end-1],\n lag2 = data.sunspot_number[1:end-2]);\n\njulia> y = data.sunspot_number[3:end];\n\njulia> tscv = TimeSeriesCV(nfolds=3);\n\njulia> evaluate(model, X, y, resampling=tscv, measure=rmse, verbosity=0)\n┌───────────────────────────┬───────────────┬────────────────────┐\n│ _.measure │ _.measurement │ _.per_fold │\n├───────────────────────────┼───────────────┼────────────────────┤\n│ RootMeanSquaredError @753 │ 21.7 │ [25.4, 16.3, 22.4] │\n└───────────────────────────┴───────────────┴────────────────────┘\n_.per_observation = [missing]\n_.fitted_params_per_fold = [ … ]\n_.report_per_fold = [ … ]\n_.train_test_rows = [ … ]\n\n\n\n\n\n","category":"type"},{"location":"evaluating_model_performance/#Custom-resampling-strategies","page":"Evaluating Model Performance","title":"Custom resampling strategies","text":"","category":"section"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"To define a new resampling strategy, make relevant parameters of your strategy the fields of a new type MyResamplingStrategy <: MLJ.ResamplingStrategy, and implement one of the following methods:","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"MLJ.train_test_pairs(my_strategy::MyResamplingStrategy, rows)\nMLJ.train_test_pairs(my_strategy::MyResamplingStrategy, rows, y)\nMLJ.train_test_pairs(my_strategy::MyResamplingStrategy, rows, X, y)","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"Each method takes a vector of indices 
rows and returns a vector [(t1, e1), (t2, e2), ... (tk, ek)] of train/test pairs of row indices selected from rows. Here X, y are the input and target data (ignored in simple strategies, such as Holdout and CV).","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"Here is the code for the Holdout strategy as an example:","category":"page"},{"location":"evaluating_model_performance/","page":"Evaluating Model Performance","title":"Evaluating Model Performance","text":"struct Holdout <: ResamplingStrategy\n fraction_train::Float64\n shuffle::Bool\n rng::Union{Int,AbstractRNG}\n\n function Holdout(fraction_train, shuffle, rng)\n 0 < fraction_train < 1 ||\n error(\"`fraction_train` must be between 0 and 1.\")\n return new(fraction_train, shuffle, rng)\n end\nend\n\n# Keyword Constructor\nfunction Holdout(; fraction_train::Float64=0.7, shuffle=nothing, rng=nothing)\n if rng isa Integer\n rng = MersenneTwister(rng)\n end\n if shuffle === nothing\n shuffle = ifelse(rng===nothing, false, true)\n end\n if rng === nothing\n rng = Random.GLOBAL_RNG\n end\n return Holdout(fraction_train, shuffle, rng)\nend\n\nfunction train_test_pairs(holdout::Holdout, rows)\n train, test = partition(rows, holdout.fraction_train,\n shuffle=holdout.shuffle, rng=holdout.rng)\n return [(train, test),]\nend","category":"page"},{"location":"common_mlj_workflows/#Common-MLJ-Workflows","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"","category":"section"},{"location":"common_mlj_workflows/#Data-ingestion","page":"Common MLJ Workflows","title":"Data ingestion","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"# to avoid RDatasets as a doc dependency:\nusing MLJ; color_off()\nimport DataFrames\nchanning = (Sex = rand([\"Male\",\"Female\"], 462),\n Entry = rand(Int, 462),\n Exit = rand(Int, 462),\n Time = rand(Int, 462),\n Cens = rand(Int, 462)) |> DataFrames.DataFrame\ncoerce!(channing, :Sex => Multiclass)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"import RDatasets\nchanning = RDatasets.dataset(\"boot\", \"channing\")\n\njulia> first(channing, 4)\n4×5 DataFrame\n Row │ Sex Entry Exit Time Cens\n │ Cat… Int32 Int32 Int32 Int32\n─────┼──────────────────────────────────\n 1 │ Male 782 909 127 1\n 2 │ Male 1020 1128 108 1\n 3 │ Male 856 969 113 1\n 4 │ Male 915 957 42 1","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Inspecting metadata, including column scientific types:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"schema(channing)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Horizontally splitting data and shuffling rows.","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Here y is the :Exit column and X everything else:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"y, X = unpack(channing, ==(:Exit), rng=123);\nnothing # hide","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Here y is the 
:Exit column and X everything else except :Time:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"y, X = unpack(channing,\n ==(:Exit),\n !=(:Time);\n rng=123);\nscitype(y)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"schema(X)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Fixing wrong scientific types in X:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"X = coerce(X, :Exit=>Continuous, :Entry=>Continuous, :Cens=>Multiclass)\nschema(X)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Loading a built-in supervised dataset:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"table = load_iris();\nschema(table)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Loading a built-in data set already split into X and y:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"X, y = @load_iris;\nselectrows(X, 1:4) # selectrows works whenever `Tables.istable(X)==true`.","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"y[1:4]","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Splitting data vertically after row shuffling:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"channing_train, channing_test = partition(channing, 0.6, rng=123);\nnothing # hide","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Or, if already horizontally split:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"(Xtrain, Xtest), (ytrain, ytest) = partition((X, y), 0.6, multi=true, rng=123)","category":"page"},{"location":"common_mlj_workflows/#Model-Search","page":"Common MLJ Workflows","title":"Model Search","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Reference: Model Search","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Searching for a supervised model:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"X, y = @load_boston\nms = models(matching(X, y))","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"ms[6]","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"models(\"Tree\");","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"A more refined search:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"models() do model\n matching(model, X, 
y) &&\n model.prediction_type == :deterministic &&\n model.is_pure_julia\nend;\nnothing # hide","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Searching for an unsupervised model:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"models(matching(X))","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Getting the metadata entry for a given model type:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"info(\"PCA\")\ninfo(\"RidgeRegressor\", pkg=\"MultivariateStats\") # a model type in multiple packages","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Extracting the model document string (output omitted):","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"doc(\"DecisionTreeClassifier\", pkg=\"DecisionTree\")\nnothing # hide","category":"page"},{"location":"common_mlj_workflows/#Instantiating-a-model","page":"Common MLJ Workflows","title":"Instantiating a model","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Reference: Getting Started, Loading Model Code","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Tree = @load DecisionTreeClassifier pkg=DecisionTree\ntree = Tree(min_samples_split=5, max_depth=4)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"or","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"tree = (@load DecisionTreeClassifier)()\ntree.min_samples_split = 5\ntree.max_depth = 4","category":"page"},{"location":"common_mlj_workflows/#Evaluating-a-model","page":"Common MLJ Workflows","title":"Evaluating a model","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Reference: Evaluating Model Performance","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"X, y = @load_boston\nKNN = @load KNNRegressor\nknn = KNN()\nevaluate(knn, X, y,\n resampling=CV(nfolds=5),\n measure=[RootMeanSquaredError(), LPLoss(1)])","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Note RootMeanSquaredError() has alias rms and LPLoss(1) has aliases l1, mae.","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Do measures() to list all losses and scores and their aliases.","category":"page"},{"location":"common_mlj_workflows/#Basic-fit/evaluate/predict-by-hand:","page":"Common MLJ Workflows","title":"Basic fit/evaluate/predict by hand:","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Reference: Getting Started, Machines, Evaluating Model Performance, Performance Measures","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ 
Workflows","text":"crabs = load_crabs() |> DataFrames.DataFrame\nschema(crabs)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"y, X = unpack(crabs, ==(:sp), !in([:index, :sex]); rng=123)\n\n\nTree = @load DecisionTreeClassifier pkg=DecisionTree\ntree = Tree(max_depth=2) # hide","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Bind the model and data together in a machine, which will additionally, store the learned parameters (fitresults) when fit:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"mach = machine(tree, X, y)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Split row indices into training and evaluation rows:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"train, test = partition(eachindex(y), 0.7); # 70:30 split","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Fit on the train data set and evaluate on the test data set:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"fit!(mach, rows=train)\nyhat = predict(mach, X[test,:])\nLogLoss(tol=1e-4)(yhat, y[test])","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Note LogLoss() has aliases log_loss and cross_entropy.","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Run measures() to list all losses and scores and their aliases (\"instances\").","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Predict on the new data set:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Xnew = (FL = rand(3), RW = rand(3), CL = rand(3), CW = rand(3), BD =rand(3))\npredict(mach, Xnew) # a vector of distributions","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"predict_mode(mach, Xnew) # a vector of point-predictions","category":"page"},{"location":"common_mlj_workflows/#More-performance-evaluation-examples","page":"Common MLJ Workflows","title":"More performance evaluation examples","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Evaluating model + data directly:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"evaluate(tree, X, y,\n resampling=Holdout(fraction_train=0.7, shuffle=true, rng=1234),\n measure=[LogLoss(), Accuracy()])","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"If a machine is already defined, as above:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"evaluate!(mach,\n resampling=Holdout(fraction_train=0.7, shuffle=true, rng=1234),\n measure=[LogLoss(), 
Accuracy()])","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Using cross-validation:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"evaluate!(mach, resampling=CV(nfolds=5, shuffle=true, rng=1234),\n measure=[LogLoss(), Accuracy()])","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"With user-specified train/test pairs of row indices:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"f1, f2, f3 = 1:13, 14:26, 27:36\npairs = [(f1, vcat(f2, f3)), (f2, vcat(f3, f1)), (f3, vcat(f1, f2))];\nevaluate!(mach,\n resampling=pairs,\n measure=[LogLoss(), Accuracy()])","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Changing a hyperparameter and re-evaluating:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"tree.max_depth = 3\nevaluate!(mach,\n resampling=CV(nfolds=5, shuffle=true, rng=1234),\n measure=[LogLoss(), Accuracy()])","category":"page"},{"location":"common_mlj_workflows/#Inspecting-training-results","page":"Common MLJ Workflows","title":"Inspecting training results","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Fit an ordinary least square model to some synthetic data:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"x1 = rand(100)\nx2 = rand(100)\n\nX = (x1=x1, x2=x2)\ny = x1 - 2x2 + 0.1*rand(100);\n\nOLS = @load LinearRegressor pkg=GLM\nols = OLS()\nmach = machine(ols, X, y) |> fit!","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Get a named tuple representing the learned parameters, human-readable if appropriate:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"fitted_params(mach)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Get other training-related information:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"report(mach)","category":"page"},{"location":"common_mlj_workflows/#Basic-fit/transform-for-unsupervised-models","page":"Common MLJ Workflows","title":"Basic fit/transform for unsupervised models","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Load data:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"X, y = @load_iris\ntrain, test = partition(eachindex(y), 0.97, shuffle=true, rng=123)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Instantiate and fit the model/machine:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"PCA = @load PCA\npca = PCA(maxoutdim=2)\nmach = machine(pca, X)\nfit!(mach, 
rows=train)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Transform selected data bound to the machine:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"transform(mach, rows=test);","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Transform new data:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Xnew = (sepal_length=rand(3), sepal_width=rand(3),\n petal_length=rand(3), petal_width=rand(3));\ntransform(mach, Xnew)","category":"page"},{"location":"common_mlj_workflows/#Inverting-learned-transformations","page":"Common MLJ Workflows","title":"Inverting learned transformations","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"y = rand(100);\nstand = Standardizer()\nmach = machine(stand, y)\nfit!(mach)\nz = transform(mach, y);\n@assert inverse_transform(mach, z) ≈ y # true","category":"page"},{"location":"common_mlj_workflows/#Nested-hyperparameter-tuning","page":"Common MLJ Workflows","title":"Nested hyperparameter tuning","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Reference: Tuning Models","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"X, y = @load_iris; nothing # hide","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Define a model with nested hyperparameters:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Tree = @load DecisionTreeClassifier pkg=DecisionTree\ntree = Tree()\nforest = EnsembleModel(model=tree, n=300)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Define ranges for hyperparameters to be tuned:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"r1 = range(forest, :bagging_fraction, lower=0.5, upper=1.0, scale=:log10)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"r2 = range(forest, :(model.n_subfeatures), lower=1, upper=4) # nested","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Wrap the model in a tuning strategy:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"tuned_forest = TunedModel(model=forest,\n tuning=Grid(resolution=12),\n resampling=CV(nfolds=6),\n ranges=[r1, r2],\n measure=BrierLoss())","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Bound the wrapped model to data:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"mach = machine(tuned_forest, X, y)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Fitting the resultant machine optimizes the hyperparameters 
specified in range, using the specified tuning and resampling strategies and performance measure (possibly a vector of measures), and retrains on all data bound to the machine:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"fit!(mach)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Inspecting the optimal model:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"F = fitted_params(mach)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"F.best_model","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Inspecting details of tuning procedure:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"r = report(mach);\nkeys(r)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"r.history[[1,end]]","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Visualizing these results:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"using Plots\nplot(mach)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"(Image: )","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Predicting on new data using the optimized model:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"predict(mach, Xnew)","category":"page"},{"location":"common_mlj_workflows/#Constructing-linear-pipelines","page":"Common MLJ Workflows","title":"Constructing linear pipelines","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Reference: Composing Models","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Constructing a linear (unbranching) pipeline with a learned target transformation/inverse transformation:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"X, y = @load_reduced_ames\nKNN = @load KNNRegressor\nknn_with_target = TransformedTargetModel(model=KNN(K=3), transformer=Standardizer())\npipe = (X -> coerce(X, :age=>Continuous)) |> OneHotEncoder() |> knn_with_target","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Evaluating the pipeline (just as you would any other model):","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"pipe.one_hot_encoder.drop_last = true # mutate a nested hyper-parameter\nevaluate(pipe, X, y, resampling=Holdout(), measure=RootMeanSquaredError(), verbosity=2)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Inspecting the learned parameters in a 
pipeline:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"mach = machine(pipe, X, y) |> fit!\nF = fitted_params(mach)\nF.transformed_target_model_deterministic.model","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Constructing a linear (unbranching) pipeline with a static (unlearned) target transformation/inverse transformation:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Tree = @load DecisionTreeRegressor pkg=DecisionTree verbosity=0\ntree_with_target = TransformedTargetModel(model=Tree(),\n transformer=y -> log.(y),\n inverse = z -> exp.(z))\npipe2 = (X -> coerce(X, :age=>Continuous)) |> OneHotEncoder() |> tree_with_target;\nnothing # hide","category":"page"},{"location":"common_mlj_workflows/#Creating-a-homogeneous-ensemble-of-models","page":"Common MLJ Workflows","title":"Creating a homogeneous ensemble of models","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Reference: Homogeneous Ensembles","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"X, y = @load_iris\nTree = @load DecisionTreeClassifier pkg=DecisionTree\ntree = Tree()\nforest = EnsembleModel(model=tree, bagging_fraction=0.8, n=300)\nmach = machine(forest, X, y)\nevaluate!(mach, measure=LogLoss())","category":"page"},{"location":"common_mlj_workflows/#Performance-curves","page":"Common MLJ Workflows","title":"Performance curves","text":"","category":"section"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Generate a plot of performance, as a function of some hyperparameter (building on the preceding example)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Single performance curve:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"r = range(forest, :n, lower=1, upper=1000, scale=:log10)\ncurve = learning_curve(mach,\n range=r,\n resampling=Holdout(),\n resolution=50,\n measure=LogLoss(),\n verbosity=0)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"using Plots\nplot(curve.parameter_values, curve.measurements, xlab=curve.parameter_name, xscale=curve.parameter_scale)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"(Image: )","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"Multiple curves:","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"curve = learning_curve(mach,\n range=r,\n resampling=Holdout(),\n measure=LogLoss(),\n resolution=50,\n rng_name=:rng,\n rngs=4,\n verbosity=0)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ Workflows","text":"plot(curve.parameter_values, curve.measurements,\nxlab=curve.parameter_name, xscale=curve.parameter_scale)","category":"page"},{"location":"common_mlj_workflows/","page":"Common MLJ Workflows","title":"Common MLJ 
Workflows","text":"(Image: )","category":"page"},{"location":"models/LinearRegressor_GLM/#LinearRegressor_GLM","page":"LinearRegressor","title":"LinearRegressor","text":"","category":"section"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"A model type for constructing a linear regressor, based on GLM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor = @load LinearRegressor pkg=GLM","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"Do model = LinearRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearRegressor(fit_intercept=...).","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor assumes the target is a continuous variable whose conditional distribution is normal with constant variance, and whose expected value is a linear combination of the features (identity link function). Options exist to specify an intercept or offset feature.","category":"page"},{"location":"models/LinearRegressor_GLM/#Training-data","page":"LinearRegressor","title":"Training data","text":"","category":"section"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"In MLJ or MLJBase, bind an instance model to data with one of:","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"mach = machine(model, X, y)\nmach = machine(model, X, y, w)","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"Here","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"X: is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the scitype with schema(X)\ny: is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)\nw: is a vector of Real per-observation weights","category":"page"},{"location":"models/LinearRegressor_GLM/#Hyper-parameters","page":"LinearRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"fit_intercept=true: Whether to calculate the intercept for this model. If set to false, no intercept will be calculated (e.g. the data is expected to be centered)\ndropcollinear=false: Whether to drop features in the training data to ensure linear independence. If true , only the first of each set of linearly-dependent features is used. The coefficient for redundant linearly dependent features is 0.0 and all associated statistics are set to NaN.\noffsetcol=nothing: Name of the column to be used as an offset, if any. An offset is a variable which is known to have a coefficient of 1.\nreport_keys: Vector of keys for the report. 
Possible keys are: :deviance, :dof_residual, :stderror, :vcov, :coef_table and :glm_model. By default only :glm_model is excluded.","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LinearRegressor_GLM/#Operations","page":"LinearRegressor","title":"Operations","text":"","category":"section"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"predict(mach, Xnew): return predictions of the target given new features Xnew having the same Scitype as X above. Predictions are probabilistic.\npredict_mean(mach, Xnew): instead return the mean of each prediction above\npredict_median(mach, Xnew): instead return the median of each prediction above.","category":"page"},{"location":"models/LinearRegressor_GLM/#Fitted-parameters","page":"LinearRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"features: The names of the features encountered during model fitting.\ncoef: The linear coefficients determined by the model.\nintercept: The intercept determined by the model.","category":"page"},{"location":"models/LinearRegressor_GLM/#Report","page":"LinearRegressor","title":"Report","text":"","category":"section"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"When all keys are enabled in report_keys, the following fields are available in report(mach):","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"deviance: Measure of deviance of fitted model with respect to a perfectly fitted model. For a linear model, this is the weighted residual sum of squares\ndof_residual: The degrees of freedom for residuals, when meaningful.\nstderror: The standard errors of the coefficients.\nvcov: The estimated variance-covariance matrix of the coefficient estimates.\ncoef_table: Table which displays coefficients and summarizes their significance and confidence intervals.\nglm_model: The raw fitted model returned by GLM.lm. Note this points to training data. 
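As a brief aside, a hedged sketch (not from the original docstring) of how the report_keys keyword just described might be restricted at construction time, using only keys from the list above:

using MLJ
LinearRegressor = @load LinearRegressor pkg=GLM
## keep only the coefficient table and standard errors in the report
model = LinearRegressor(report_keys=[:coef_table, :stderror])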
Refer to the GLM.jl documentation for usage.","category":"page"},{"location":"models/LinearRegressor_GLM/#Examples","page":"LinearRegressor","title":"Examples","text":"","category":"section"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"using MLJ\nLinearRegressor = @load LinearRegressor pkg=GLM\nglm = LinearRegressor()\n\nX, y = make_regression(100, 2) ## synthetic data\nmach = machine(glm, X, y) |> fit!\n\nXnew, _ = make_regression(3, 2)\nyhat = predict(mach, Xnew) ## new predictions\nyhat_point = predict_mean(mach, Xnew) ## new predictions\n\nfitted_params(mach).features\nfitted_params(mach).coef ## x1, x2, intercept\nfitted_params(mach).intercept\n\nreport(mach)","category":"page"},{"location":"models/LinearRegressor_GLM/","page":"LinearRegressor","title":"LinearRegressor","text":"See also LinearCountRegressor, LinearBinaryClassifier","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/#SelfOrganizingMap_SelfOrganizingMaps","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"","category":"section"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"SelfOrganizingMap","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"A model type for constructing a self organizing map, based on SelfOrganizingMaps.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"SelfOrganizingMap = @load SelfOrganizingMap pkg=SelfOrganizingMaps","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"Do model = SelfOrganizingMap() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in SelfOrganizingMap(k=...).","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"SelfOrganizingMaps implements Kohonen's Self Organizing Map, Proceedings of the IEEE; Kohonen, T.; (1990):\"The self-organizing map\"","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/#Training-data","page":"SelfOrganizingMap","title":"Training data","text":"","category":"section"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"In MLJ or MLJBase, bind an instance model to data with mach = machine(model, X) where","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"X: an AbstractMatrix or Table of input features whose columns are of scitype Continuous.","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/#Hyper-parameters","page":"SelfOrganizingMap","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"k=10: Number of nodes along one side of the SOM grid. There are k² total nodes.\nη=0.5: Learning rate. Scales the adjustment made to the winning node and its neighbors during each round of training.\nσ²=0.05: The (squared) neighbor radius. Used to determine scale for neighbor node adjustments.\ngrid_type=:rectangular Node grid geometry. One of (:rectangular, :hexagonal, :spherical).\nη_decay=:exponential Learning rate schedule function. One of (:exponential, :asymptotic)\nσ_decay=:exponential Neighbor radius schedule function. One of (:exponential, :asymptotic, :none)\nneighbor_function=:gaussian Kernel function used to make adjustment to neighbor weights. Scale is set by σ². One of (:gaussian, :mexican_hat).\nmatching_distance=euclidean Distance function from Distances.jl used to determine winning node.\nNepochs=1 Number of times to repeat training on the shuffled dataset.","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/#Operations","page":"SelfOrganizingMap","title":"Operations","text":"","category":"section"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"transform(mach, Xnew): returns the coordinates of the winning SOM node for each instance of Xnew. For SOM of gridtype :rectangular and :hexagonal, these are Cartesian coordinates. 
For gridtype :spherical, these are the latitude and longitude in radians.","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/#Fitted-parameters","page":"SelfOrganizingMap","title":"Fitted parameters","text":"","category":"section"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"coords: The coordinates of each of the SOM nodes (points in the domain of the map) with shape (k², 2)\nweights: Array of weight vectors for the SOM nodes (corresponding points in the map's range) of shape (k², input dimension)","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/#Report","page":"SelfOrganizingMap","title":"Report","text":"","category":"section"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"The fields of report(mach) are:","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"classes: the index of the winning node for each instance of the training data X interpreted as a class label","category":"page"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/#Examples","page":"SelfOrganizingMap","title":"Examples","text":"","category":"section"},{"location":"models/SelfOrganizingMap_SelfOrganizingMaps/","page":"SelfOrganizingMap","title":"SelfOrganizingMap","text":"using MLJ\nsom = @load SelfOrganizingMap pkg=SelfOrganizingMaps\nmodel = som()\nX, y = make_regression(50, 3) ## synthetic data\nmach = machine(model, X) |> fit!\nX̃ = transform(mach, X)\n\nrpt = report(mach)\nclasses = rpt.classes","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/#MultinomialClassifier_MLJLinearModels","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"","category":"section"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"MultinomialClassifier","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"A model type for constructing a multinomial classifier, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"MultinomialClassifier = @load MultinomialClassifier pkg=MLJLinearModels","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"Do model = MultinomialClassifier() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"This model coincides with LogisticClassifier, except certain optimizations possible in the special binary case will not be applied. 
Its hyperparameters are identical.","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/#Training-data","page":"MultinomialClassifier","title":"Training data","text":"","category":"section"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"where:","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/#Hyperparameters","page":"MultinomialClassifier","title":"Hyperparameters","text":"","category":"section"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"lambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. Default: eps()\ngamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0\npenalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2\nfit_intercept::Bool: whether to fit the intercept or not. Default: true\npenalize_intercept::Bool: whether to penalize the intercept. Default: false\nscale_penalty_with_samples::Bool: whether to scale the penalty with the number of samples. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, NewtonCG, ProxGrad; but subject to the following restrictions:\nIf penalty = :l2, ProxGrad is disallowed. Otherwise, ProxGrad is the only option.\nUnless scitype(y) <: Finite{2} (binary target) Newton is disallowed.\nIf solver = nothing (default) then ProxGrad(accel=true) (FISTA) is used, unless gamma = 0, in which case LBFGS() is used.\nSolver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) 
Default: nothing","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/#Example","page":"MultinomialClassifier","title":"Example","text":"","category":"section"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"using MLJ\nX, y = make_blobs(centers = 3)\nmach = fit!(machine(MultinomialClassifier(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"},{"location":"models/MultinomialClassifier_MLJLinearModels/","page":"MultinomialClassifier","title":"MultinomialClassifier","text":"See also LogisticClassifier.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/#MultitargetSRRegressor_SymbolicRegression","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"","category":"section"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"MultitargetSRRegressor","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"A model type for constructing a Multi-Target Symbolic Regression via Evolutionary Search, based on SymbolicRegression.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"MultitargetSRRegressor = @load MultitargetSRRegressor pkg=SymbolicRegression","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"Do model = MultitargetSRRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in MultitargetSRRegressor(binary_operators=...).","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"Multi-target Symbolic Regression regressor (MultitargetSRRegressor) conducts several searches for expressions that predict each target variable from a set of input variables. All data is assumed to be Continuous. The search is performed using an evolutionary algorithm. 
This algorithm is described in the paper https://arxiv.org/abs/2305.01582.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/#Training-data","page":"MultitargetSRRegressor","title":"Training data","text":"","category":"section"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"OR","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"mach = machine(model, X, y, w)","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"Here:","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"Continuous; check column scitypes with schema(X). Variable names in discovered expressions will be taken from the column names of X, if available. Units in columns of X (use DynamicQuantities for units) will trigger dimensional analysis to be used.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"y is the target, which can be any table of target variables whose element scitype is Continuous; check the scitype with schema(y). Units in columns of y (use DynamicQuantities for units) will trigger dimensional analysis to be used.\nw is the observation weights which can either be nothing (default) or an AbstractVector whoose element scitype is Count or Continuous. The same weights are used for all targets.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"Train the machine using fit!(mach), inspect the discovered expressions with report(mach), and predict on new data with predict(mach, Xnew). Note that unlike other regressors, symbolic regression stores a list of lists of trained models. The models chosen from each of these lists is defined by the function selection_method keyword argument, which by default balances accuracy and complexity.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/#Hyper-parameters","page":"MultitargetSRRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"binary_operators: Vector of binary operators (functions) to use. Each operator should be defined for two input scalars, and one output scalar. All operators need to be defined over the entire real line (excluding infinity - these are stopped before they are input), or return NaN where not defined. 
For speed, define it so it takes two reals of the same type as input, and outputs the same type. For the SymbolicUtils simplification backend, you will need to define a generic method of the operator so it takes arbitrary types.\nunary_operators: Same, but for unary operators (one input scalar, gives an output scalar).\nconstraints: Array of pairs specifying size constraints for each operator. The constraints for a binary operator should be a 2-tuple (e.g., (-1, -1)) and the constraints for a unary operator should be an Int. A size constraint is a limit to the size of the subtree in each argument of an operator. e.g., [(^)=>(-1, 3)] means that the ^ operator can have arbitrary size (-1) in its left argument, but a maximum size of 3 in its right argument. Default is no constraints.\nbatching: Whether to evolve based on small mini-batches of data, rather than the entire dataset.\nbatch_size: What batch size to use if using batching.\nelementwise_loss: What elementwise loss function to use. Can be one of the following losses, or any other loss of type SupervisedLoss. You can also pass a function that takes a scalar target (left argument), and scalar predicted (right argument), and returns a scalar. This will be averaged over the predicted data. If weights are supplied, your function should take a third argument for the weight scalar. Included losses: Regression: - LPDistLoss{P}(), - L1DistLoss(), - L2DistLoss() (mean square), - LogitDistLoss(), - HuberLoss(d), - L1EpsilonInsLoss(ϵ), - L2EpsilonInsLoss(ϵ), - PeriodicLoss(c), - QuantileLoss(τ), Classification: - ZeroOneLoss(), - PerceptronLoss(), - L1HingeLoss(), - SmoothedL1HingeLoss(γ), - ModifiedHuberLoss(), - L2MarginLoss(), - ExpLoss(), - SigmoidLoss(), - DWDMarginLoss(q).\nloss_function: Alternatively, you may redefine the loss used as any function of tree::Node{T}, dataset::Dataset{T}, and options::Options, so long as you output a non-negative scalar of type T. This is useful if you want to use a loss that takes into account derivatives, or correlations across the dataset. This also means you could use a custom evaluation for a particular expression. If you are using batching=true, then your function should accept a fourth argument idx, which is either nothing (indicating that the full dataset should be used), or a vector of indices to use for the batch. For example,\n function my_loss(tree, dataset::Dataset{T,L}, options)::L where {T,L}\n prediction, flag = eval_tree_array(tree, dataset.X, options)\n if !flag\n return L(Inf)\n end\n return sum((prediction .- dataset.y) .^ 2) / dataset.n\n end\npopulations: How many populations of equations to use.\npopulation_size: How many equations in each population.\nncycles_per_iteration: How many generations to consider per iteration.\ntournament_selection_n: Number of expressions considered in each tournament.\ntournament_selection_p: The fittest expression in a tournament is to be selected with probability p, the next fittest with probability p*(1-p), and so forth.\ntopn: Number of equations to return to the host process, and to consider for the hall of fame.\ncomplexity_of_operators: What complexity should be assigned to each operator, and the occurrence of a constant or variable. By default, this is 1 for all operators. Can be a real number as well, in which case the complexity of an expression will be rounded to the nearest integer. Input this in the form of, e.g., [(^) => 3, sin => 2].\ncomplexity_of_constants: What complexity should be assigned to use of a constant. 
By default, this is 1.\ncomplexity_of_variables: What complexity should be assigned to each variable. By default, this is 1.\nalpha: The probability of accepting an equation mutation during regularized evolution is given by exp(-delta_loss/(alpha * T)), where T goes from 1 to 0. Thus, alpha=infinite is the same as no annealing.\nmaxsize: Maximum size of equations during the search.\nmaxdepth: Maximum depth of equations during the search, by default this is set equal to the maxsize.\nparsimony: A multiplicative factor for how much complexity is punished.\ndimensional_constraint_penalty: An additive factor if the dimensional constraint is violated.\nuse_frequency: Whether to use a parsimony that adapts to the relative proportion of equations at each complexity; this will ensure that there are a balanced number of equations considered for every complexity.\nuse_frequency_in_tournament: Whether to use the adaptive parsimony described above inside the score, rather than just at the mutation accept/reject stage.\nadaptive_parsimony_scaling: How much to scale the adaptive parsimony term in the loss. Increase this if the search is spending too much time optimizing the most complex equations.\nturbo: Whether to use LoopVectorization.@turbo to evaluate expressions. This can be significantly faster, but is only compatible with certain operators. Experimental!\nmigration: Whether to migrate equations between processes.\nhof_migration: Whether to migrate equations from the hall of fame to processes.\nfraction_replaced: What fraction of each population to replace with migrated equations at the end of each cycle.\nfraction_replaced_hof: What fraction to replace with hall of fame equations at the end of each cycle.\nshould_simplify: Whether to simplify equations. If you pass a custom objective, this will be set to false.\nshould_optimize_constants: Whether to use an optimization algorithm to periodically optimize constants in equations.\noptimizer_nrestarts: How many different random starting positions to consider for optimization of constants.\noptimizer_algorithm: Select algorithm to use for optimizing constants. Default is \"BFGS\", but \"NelderMead\" is also supported.\noptimizer_options: General options for the constant optimization. For details we refer to the documentation on Optim.Options from the Optim.jl package. Options can be provided here as NamedTuple, e.g. (iterations=16,), as a Dict, e.g. Dict(:x_tol => 1.0e-32,), or as an Optim.Options instance.\noutput_file: What file to store equations to, as a backup.\nperturbation_factor: When mutating a constant, either multiply or divide by (1+perturbation_factor)^(rand()+1).\nprobability_negate_constant: Probability of negating a constant in the equation when mutating it.\nmutation_weights: Relative probabilities of the mutations. The struct MutationWeights should be passed to these options. See its documentation on MutationWeights for the different weights.\ncrossover_probability: Probability of performing crossover.\nannealing: Whether to use simulated annealing.\nwarmup_maxsize_by: Whether to slowly increase the max size from 5 up to maxsize. If nonzero, specifies the fraction through the search at which the maxsize should be reached.\nverbosity: Whether to print debugging statements or not.\nprint_precision: How many digits to print when printing equations. By default, this is 5.\nsave_to_file: Whether to save equations to a file during the search.\nbin_constraints: See constraints. 
This is the same, but specified for binary operators only (for example, if you have an operator that is both a binary and unary operator).\nuna_constraints: Likewise, for unary operators.\nseed: What random seed to use. nothing uses no seed.\nprogress: Whether to use a progress bar output (verbosity will have no effect).\nearly_stop_condition: Float - whether to stop early if the mean loss gets below this value. Function - a function taking (loss, complexity) as arguments and returning true or false.\ntimeout_in_seconds: Float64 - the time in seconds after which to exit (as an alternative to the number of iterations).\nmax_evals: Int (or Nothing) - the maximum number of evaluations of expressions to perform.\nskip_mutation_failures: Whether to simply skip over mutations that fail or are rejected, rather than to replace the mutated expression with the original expression and proceed normally.\nenable_autodiff: Whether to enable automatic differentiation functionality. This is turned off by default. If turned on, this will be turned off if one of the operators does not have well-defined gradients.\nnested_constraints: Specifies how many times a combination of operators can be nested. For example, [sin => [cos => 0], cos => [cos => 2]] specifies that cos may never appear within a sin, but sin can be nested with itself an unlimited number of times. The second term specifies that cos can be nested up to 2 times within a cos, so that cos(cos(cos(x))) is allowed (as well as any combination of + or - within it), but cos(cos(cos(cos(x)))) is not allowed. When an operator is not specified, it is assumed that it can be nested an unlimited number of times. This requires that there is no operator which is used both in the unary operators and the binary operators (e.g., - could be both subtract, and negation). For binary operators, both arguments are treated the same way, and the max of each argument is constrained.\ndeterministic: Use a global counter for the birth time, rather than calls to time(). This gives perfect resolution, and is therefore deterministic. However, it is not thread safe, and must be used in serial mode.\ndefine_helper_functions: Whether to define helper functions for constructing and evaluating trees.\nniterations::Int=10: The number of iterations to perform the search. More iterations will improve the results.\nparallelism=:multithreading: What parallelism mode to use. The options are :multithreading, :multiprocessing, and :serial. By default, multithreading will be used. Multithreading uses less memory, but multiprocessing can handle multi-node compute. If using :multithreading mode, the number of threads available to julia are used. If using :multiprocessing, numprocs processes will be created dynamically if procs is unset. If you have already allocated processes, pass them to the procs argument and they will be used. You may also pass a string instead of a symbol, like \"multithreading\".\nnumprocs::Union{Int, Nothing}=nothing: The number of processes to use, if you want equation_search to set this up automatically. 
By default this will be 4, but can be any number (you should pick a number <= the number of cores available).\nprocs::Union{Vector{Int}, Nothing}=nothing: If you have set up a distributed run manually with procs = addprocs() and @everywhere, pass the procs to this keyword argument.\naddprocs_function::Union{Function, Nothing}=nothing: If using multiprocessing (parallelism=:multithreading), and are not passing procs manually, then they will be allocated dynamically using addprocs. However, you may also pass a custom function to use instead of addprocs. This function should take a single positional argument, which is the number of processes to use, as well as the lazy keyword argument. For example, if set up on a slurm cluster, you could pass addprocs_function = addprocs_slurm, which will set up slurm processes.\nheap_size_hint_in_bytes::Union{Int,Nothing}=nothing: On Julia 1.9+, you may set the --heap-size-hint flag on Julia processes, recommending garbage collection once a process is close to the recommended size. This is important for long-running distributed jobs where each process has an independent memory, and can help avoid out-of-memory errors. By default, this is set to Sys.free_memory() / numprocs.\nruntests::Bool=true: Whether to run (quick) tests before starting the search, to see if there will be any problems during the equation search related to the host environment.\nloss_type::Type=Nothing: If you would like to use a different type for the loss than for the data you passed, specify the type here. Note that if you pass complex data ::Complex{L}, then the loss type will automatically be set to L.\nselection_method::Function: Function to selection expression from the Pareto frontier for use in predict. See SymbolicRegression.MLJInterfaceModule.choose_best for an example. This function should return a single integer specifying the index of the expression to use. By default, choose_best maximizes the score (a pound-for-pound rating) of expressions reaching the threshold of 1.5x the minimum loss. To fix the index at 5, you could just write Returns(5).\ndimensions_type::AbstractDimensions: The type of dimensions to use when storing the units of the data. By default this is DynamicQuantities.SymbolicDimensions.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/#Operations","page":"MultitargetSRRegressor","title":"Operations","text":"","category":"section"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"predict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above. 
The expression used for prediction is defined by the selection_method function, which can be seen by viewing report(mach).best_idx.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/#Fitted-parameters","page":"MultitargetSRRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"best_idx::Vector{Int}: The index of the best expression in each Pareto frontier, as determined by the selection_method function.\nequations::Vector{Vector{Node{T}}}: The expressions discovered by the search, represented in a dominating Pareto frontier (i.e., the best expressions found for each complexity). The outer vector is indexed by target variable, and the inner vector is ordered by increasing complexity. T is equal to the element type of the passed data.\nequation_strings::Vector{Vector{String}}: The expressions discovered by the search, represented as strings for easy inspection.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/#Report","page":"MultitargetSRRegressor","title":"Report","text":"","category":"section"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"The fields of report(mach) are:","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"best_idx::Vector{Int}: The index of the best expression in each Pareto frontier, as determined by the selection_method function.\nequations::Vector{Vector{Node{T}}}: The expressions discovered by the search, represented in a dominating Pareto frontier (i.e., the best expressions found for each complexity). The outer vector is indexed by target variable, and the inner vector is ordered by increasing complexity.\nequation_strings::Vector{Vector{String}}: The expressions discovered by the search, represented as strings for easy inspection.\ncomplexities::Vector{Vector{Int}}: The complexity of each expression in each Pareto frontier.\nlosses::Vector{Vector{L}}: The loss of each expression in each Pareto frontier, according to the loss function specified in the model. The type L is the loss type, which is usually the same as the element type of data passed (i.e., T), but can differ if complex data types are passed.\nscores::Vector{Vector{L}}: A metric which considers both the complexity and loss of an expression, equal to the change in the log-loss divided by the change in complexity, relative to the previous expression along the Pareto frontier. 
A larger score aims to indicate an expression is more likely to be the true expression generating the data, but this is very problem-dependent and generally several other factors should be considered.","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/#Examples","page":"MultitargetSRRegressor","title":"Examples","text":"","category":"section"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"using MLJ\nMultitargetSRRegressor = @load MultitargetSRRegressor pkg=SymbolicRegression\nX = (a=rand(100), b=rand(100), c=rand(100))\nY = (y1=(@. cos(X.c) * 2.1 - 0.9), y2=(@. X.a * X.b + X.c))\nmodel = MultitargetSRRegressor(binary_operators=[+, -, *], unary_operators=[exp], niterations=100)\nmach = machine(model, X, Y)\nfit!(mach)\ny_hat = predict(mach, X)\n## View the equations used:\nr = report(mach)\nfor (output_index, (eq, i)) in enumerate(zip(r.equation_strings, r.best_idx))\n println(\"Equation used for \", output_index, \": \", eq[i])\nend","category":"page"},{"location":"models/MultitargetSRRegressor_SymbolicRegression/","page":"MultitargetSRRegressor","title":"MultitargetSRRegressor","text":"See also SRRegressor.","category":"page"},{"location":"models/PerceptronClassifier_MLJScikitLearnInterface/#PerceptronClassifier_MLJScikitLearnInterface","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"","category":"section"},{"location":"models/PerceptronClassifier_MLJScikitLearnInterface/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"PerceptronClassifier","category":"page"},{"location":"models/PerceptronClassifier_MLJScikitLearnInterface/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"A model type for constructing a perceptron classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/PerceptronClassifier_MLJScikitLearnInterface/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/PerceptronClassifier_MLJScikitLearnInterface/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"PerceptronClassifier = @load PerceptronClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/PerceptronClassifier_MLJScikitLearnInterface/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"Do model = PerceptronClassifier() to construct an instance with default hyper-parameters. 
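A minimal usage sketch (an illustration added here, not part of the original docstring; it assumes MLJScikitLearnInterface and its scikit-learn dependency are installed, and uses the iris data only as a convenient Multiclass example):

using MLJ
PerceptronClassifier = @load PerceptronClassifier pkg=MLJScikitLearnInterface
X, y = @load_iris
mach = machine(PerceptronClassifier(), X, y) |> fit!
yhat = predict(mach, X) ## point predictions of the class labels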
Provide keyword arguments to override hyper-parameter defaults, as in PerceptronClassifier(penalty=...).","category":"page"},{"location":"models/PerceptronClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"PerceptronClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/PerceptronClassifier_MLJScikitLearnInterface/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"penalty = nothing\nalpha = 0.0001\nfit_intercept = true\nmax_iter = 1000\ntol = 0.001\nshuffle = true\nverbose = 0\neta0 = 1.0\nn_jobs = nothing\nrandom_state = 0\nearly_stopping = false\nvalidation_fraction = 0.1\nn_iter_no_change = 5\nclass_weight = nothing\nwarm_start = false","category":"page"},{"location":"models/KNeighborsRegressor_MLJScikitLearnInterface/#KNeighborsRegressor_MLJScikitLearnInterface","page":"KNeighborsRegressor","title":"KNeighborsRegressor","text":"","category":"section"},{"location":"models/KNeighborsRegressor_MLJScikitLearnInterface/","page":"KNeighborsRegressor","title":"KNeighborsRegressor","text":"KNeighborsRegressor","category":"page"},{"location":"models/KNeighborsRegressor_MLJScikitLearnInterface/","page":"KNeighborsRegressor","title":"KNeighborsRegressor","text":"A model type for constructing a K-nearest neighbors regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/KNeighborsRegressor_MLJScikitLearnInterface/","page":"KNeighborsRegressor","title":"KNeighborsRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/KNeighborsRegressor_MLJScikitLearnInterface/","page":"KNeighborsRegressor","title":"KNeighborsRegressor","text":"KNeighborsRegressor = @load KNeighborsRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/KNeighborsRegressor_MLJScikitLearnInterface/","page":"KNeighborsRegressor","title":"KNeighborsRegressor","text":"Do model = KNeighborsRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in KNeighborsRegressor(n_neighbors=...).","category":"page"},{"location":"models/KNeighborsRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"KNeighborsRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/KNeighborsRegressor_MLJScikitLearnInterface/","page":"KNeighborsRegressor","title":"KNeighborsRegressor","text":"n_neighbors = 5\nweights = uniform\nalgorithm = auto\nleaf_size = 30\np = 2\nmetric = minkowski\nmetric_params = nothing\nn_jobs = nothing","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/#NeuralNetworkRegressor_MLJFlux","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"NeuralNetworkRegressor","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"A model type for constructing a neural network regressor, based on MLJFlux.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"NeuralNetworkRegressor = @load NeuralNetworkRegressor pkg=MLJFlux","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"Do model = NeuralNetworkRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in NeuralNetworkRegressor(builder=...).","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"NeuralNetworkRegressor is for training a data-dependent Flux.jl neural network to predict a Continuous target, given a table of Continuous features. Users provide a recipe for constructing the network, based on properties of the data that is encountered, by specifying an appropriate builder. See MLJFlux documentation for more on builders.","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/#Training-data","page":"NeuralNetworkRegressor","title":"Training data","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"Here:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"X is either a Matrix or any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X). 
If X is a Matrix, it is assumed to have columns corresponding to features and rows corresponding to observations.\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/#Hyper-parameters","page":"NeuralNetworkRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"builder=MLJFlux.Linear(σ=Flux.relu): An MLJFlux builder that constructs a neural network. Possible builders include: MLJFlux.Linear, MLJFlux.Short, and MLJFlux.MLP. See MLJFlux documentation for more on builders, and the example below for using the @builder convenience macro.\noptimiser::Flux.Adam(): A Flux.Optimise optimiser. The optimiser performs the updating of the weights of the network. For further reference, see the Flux optimiser documentation. To choose a learning rate (the update rate of the optimizer), a good rule of thumb is to start out at 10e-3, and tune using powers of 10 between 1 and 1e-7.\nloss=Flux.mse: The loss function which the network will optimize. Should be a function which can be called in the form loss(yhat, y). Possible loss functions are listed in the Flux loss function documentation. For a regression task, natural loss functions are:\nFlux.mse\nFlux.mae\nFlux.msle\nFlux.huber_loss\nCurrently MLJ measures are not supported as loss functions here.\nepochs::Int=10: The duration of training, in epochs. Typically, one epoch represents one pass through the complete training dataset.\nbatch_size::int=1: the batch size to be used for training, representing the number of samples per update of the network weights. Typically, batch size is between 8 and 512. Increasing batch size may accelerate training if acceleration=CUDALibs() and a GPU is available.\nlambda::Float64=0: The strength of the weight regularization penalty. Can be any value in the range [0, ∞).\nalpha::Float64=0: The L2/L1 mix of regularization, in the range [0, 1]. A value of 0 represents L2 regularization, and a value of 1 represents L1 regularization.\nrng::Union{AbstractRNG, Int64}: The random number generator or seed used during training.\noptimizer_changes_trigger_retraining::Bool=false: Defines what happens when re-fitting a machine if the associated optimiser has changed. If true, the associated machine will retrain from scratch on fit! call, otherwise it will not.\nacceleration::AbstractResource=CPU1(): Defines on what hardware training is done. 
For Training on GPU, use CUDALibs().","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/#Operations","page":"NeuralNetworkRegressor","title":"Operations","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"predict(mach, Xnew): return predictions of the target given new features Xnew, which should have the same scitype as X above.","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/#Fitted-parameters","page":"NeuralNetworkRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"chain: The trained \"chain\" (Flux.jl model), namely the series of layers, functions, and activations which make up the neural network.","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/#Report","page":"NeuralNetworkRegressor","title":"Report","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"The fields of report(mach) are:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"training_losses: A vector of training losses (penalized if lambda != 0) in historical order, of length epochs + 1. The first element is the pre-training loss.","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/#Examples","page":"NeuralNetworkRegressor","title":"Examples","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"In this example we build a regression model for the Boston house price dataset.","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"using MLJ\nimport MLJFlux\nusing Flux","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"First, we load in the data: The :MEDV column becomes the target vector y, and all remaining columns go into a table X, with the exception of :CHAS:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"data = OpenML.load(531); ## Loads from https://www.openml.org/d/531\ny, X = unpack(data, ==(:MEDV), !=(:CHAS); rng=123);\n\nscitype(y)\nschema(X)","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"Since MLJFlux models do not handle ordered factors, we'll treat :RAD as Continuous:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"X = coerce(X, :RAD=>Continuous)","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"Splitting off a test 
set:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"(X, Xtest), (y, ytest) = partition((X, y), 0.7, multi=true);","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"Next, we can define a builder, making use of a convenience macro to do so. In the following @builder call, n_in is a proxy for the number input features (which will be known at fit! time) and rng is a proxy for a RNG (which will be passed from the rng field of model defined below). We also have the parameter n_out which is the number of output features. As we are doing single target regression, the value passed will always be 1, but the builder we define will also work for MultitargetNeuralRegressor.","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"builder = MLJFlux.@builder begin\n init=Flux.glorot_uniform(rng)\n Chain(\n Dense(n_in, 64, relu, init=init),\n Dense(64, 32, relu, init=init),\n Dense(32, n_out, init=init),\n )\nend","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"Instantiating a model:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"NeuralNetworkRegressor = @load NeuralNetworkRegressor pkg=MLJFlux\nmodel = NeuralNetworkRegressor(\n builder=builder,\n rng=123,\n epochs=20\n)","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"We arrange for standardization of the the target by wrapping our model in TransformedTargetModel, and standardization of the features by inserting the wrapped model in a pipeline:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"pipe = Standardizer |> TransformedTargetModel(model, target=Standardizer)","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"If we fit with a high verbosity (>1), we will see the losses during training. 
We can also see the losses in the output of report(mach).","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"mach = machine(pipe, X, y)\nfit!(mach, verbosity=2)\n\n## first element initial loss, 2:end per epoch training losses\nreport(mach).transformed_target_model_deterministic.model.training_losses","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/#Experimenting-with-learning-rate","page":"NeuralNetworkRegressor","title":"Experimenting with learning rate","text":"","category":"section"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"We can visually compare how the learning rate affects the predictions:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"using Plots\n\nrates = [5e-5, 1e-4, 0.005, 0.001, 0.05]\nplt=plot()\n\nforeach(rates) do η\n pipe.transformed_target_model_deterministic.model.optimiser.eta = η\n fit!(mach, force=true, verbosity=0)\n losses =\n report(mach).transformed_target_model_deterministic.model.training_losses[3:end]\n plot!(1:length(losses), losses, label=η)\nend\n\nplt\n\npipe.transformed_target_model_deterministic.model.optimiser.eta = 0.0001","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"With the learning rate fixed, we compute a CV estimate of the performance (using all data bound to mach) and compare this with performance on the test set:","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"## CV estimate, based on `(X, y)`:\nevaluate!(mach, resampling=CV(nfolds=5), measure=l2)\n\n## loss for `(Xtest, ytest)`:\nfit!(mach) ## train on `(X, y)`\nyhat = predict(mach, Xtest)\nl2(yhat, ytest) |> mean","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"These losses, for the pipeline model, refer to the target on the original, unstandardized, scale.","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"For implementing stopping criteria and other iteration controls, refer to examples linked from the MLJFlux documentation.","category":"page"},{"location":"models/NeuralNetworkRegressor_MLJFlux/","page":"NeuralNetworkRegressor","title":"NeuralNetworkRegressor","text":"See also MultitargetNeuralNetworkRegressor","category":"page"},{"location":"models/PassiveAggressiveRegressor_MLJScikitLearnInterface/#PassiveAggressiveRegressor_MLJScikitLearnInterface","page":"PassiveAggressiveRegressor","title":"PassiveAggressiveRegressor","text":"","category":"section"},{"location":"models/PassiveAggressiveRegressor_MLJScikitLearnInterface/","page":"PassiveAggressiveRegressor","title":"PassiveAggressiveRegressor","text":"PassiveAggressiveRegressor","category":"page"},{"location":"models/PassiveAggressiveRegressor_MLJScikitLearnInterface/","page":"PassiveAggressiveRegressor","title":"PassiveAggressiveRegressor","text":"A model type for constructing a passive aggressive regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model 
interface.","category":"page"},{"location":"models/PassiveAggressiveRegressor_MLJScikitLearnInterface/","page":"PassiveAggressiveRegressor","title":"PassiveAggressiveRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/PassiveAggressiveRegressor_MLJScikitLearnInterface/","page":"PassiveAggressiveRegressor","title":"PassiveAggressiveRegressor","text":"PassiveAggressiveRegressor = @load PassiveAggressiveRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/PassiveAggressiveRegressor_MLJScikitLearnInterface/","page":"PassiveAggressiveRegressor","title":"PassiveAggressiveRegressor","text":"Do model = PassiveAggressiveRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in PassiveAggressiveRegressor(C=...).","category":"page"},{"location":"models/PassiveAggressiveRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"PassiveAggressiveRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/PassiveAggressiveRegressor_MLJScikitLearnInterface/","page":"PassiveAggressiveRegressor","title":"PassiveAggressiveRegressor","text":"C = 1.0\nfit_intercept = true\nmax_iter = 1000\ntol = 0.0001\nearly_stopping = false\nvalidation_fraction = 0.1\nn_iter_no_change = 5\nshuffle = true\nverbose = 0\nloss = epsilon_insensitive\nepsilon = 0.1\nrandom_state = nothing\nwarm_start = false\naverage = false","category":"page"},{"location":"models/LOCIDetector_OutlierDetectionPython/#LOCIDetector_OutlierDetectionPython","page":"LOCIDetector","title":"LOCIDetector","text":"","category":"section"},{"location":"models/LOCIDetector_OutlierDetectionPython/","page":"LOCIDetector","title":"LOCIDetector","text":"LOCIDetector(alpha = 0.5,\n k = 3)","category":"page"},{"location":"models/LOCIDetector_OutlierDetectionPython/","page":"LOCIDetector","title":"LOCIDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.loci","category":"page"},{"location":"api/#Index-of-Methods","page":"Index of Methods","title":"Index of Methods","text":"","category":"section"},{"location":"api/","page":"Index of Methods","title":"Index of Methods","text":"","category":"page"},{"location":"models/OCSVMDetector_OutlierDetectionPython/#OCSVMDetector_OutlierDetectionPython","page":"OCSVMDetector","title":"OCSVMDetector","text":"","category":"section"},{"location":"models/OCSVMDetector_OutlierDetectionPython/","page":"OCSVMDetector","title":"OCSVMDetector","text":"OCSVMDetector(kernel = \"rbf\",\n degree = 3,\n gamma = \"auto\",\n coef0 = 0.0,\n tol = 0.001,\n nu = 0.5,\n shrinking = true,\n cache_size = 200,\n verbose = false,\n max_iter = -1)","category":"page"},{"location":"models/OCSVMDetector_OutlierDetectionPython/","page":"OCSVMDetector","title":"OCSVMDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.ocsvm","category":"page"},{"location":"models/ExtraTreesRegressor_MLJScikitLearnInterface/#ExtraTreesRegressor_MLJScikitLearnInterface","page":"ExtraTreesRegressor","title":"ExtraTreesRegressor","text":"","category":"section"},{"location":"models/ExtraTreesRegressor_MLJScikitLearnInterface/","page":"ExtraTreesRegressor","title":"ExtraTreesRegressor","text":"ExtraTreesRegressor","category":"page"},{"location":"models/ExtraTreesRegressor_MLJScikitLearnInterface/","page":"ExtraTreesRegressor","title":"ExtraTreesRegressor","text":"A model type for constructing a extra trees regressor, based 
on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ExtraTreesRegressor_MLJScikitLearnInterface/","page":"ExtraTreesRegressor","title":"ExtraTreesRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ExtraTreesRegressor_MLJScikitLearnInterface/","page":"ExtraTreesRegressor","title":"ExtraTreesRegressor","text":"ExtraTreesRegressor = @load ExtraTreesRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/ExtraTreesRegressor_MLJScikitLearnInterface/","page":"ExtraTreesRegressor","title":"ExtraTreesRegressor","text":"Do model = ExtraTreesRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ExtraTreesRegressor(n_estimators=...).","category":"page"},{"location":"models/ExtraTreesRegressor_MLJScikitLearnInterface/","page":"ExtraTreesRegressor","title":"ExtraTreesRegressor","text":"Extra trees regressor, fits a number of randomized decision trees on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting.","category":"page"},{"location":"models/LOFDetector_OutlierDetectionPython/#LOFDetector_OutlierDetectionPython","page":"LOFDetector","title":"LOFDetector","text":"","category":"section"},{"location":"models/LOFDetector_OutlierDetectionPython/","page":"LOFDetector","title":"LOFDetector","text":"LOFDetector(n_neighbors = 5,\n algorithm = \"auto\",\n leaf_size = 30,\n metric = \"minkowski\",\n p = 2,\n metric_params = nothing,\n n_jobs = 1,\n novelty = true)","category":"page"},{"location":"models/LOFDetector_OutlierDetectionPython/","page":"LOFDetector","title":"LOFDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.lof","category":"page"},{"location":"models/PerceptronClassifier_BetaML/#PerceptronClassifier_BetaML","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"","category":"section"},{"location":"models/PerceptronClassifier_BetaML/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"mutable struct PerceptronClassifier <: MLJModelInterface.Probabilistic","category":"page"},{"location":"models/PerceptronClassifier_BetaML/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"The classical perceptron algorithm using one-vs-all for multiclass, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/PerceptronClassifier_BetaML/#Hyperparameters:","page":"PerceptronClassifier","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/PerceptronClassifier_BetaML/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"initial_coefficients::Union{Nothing, Matrix{Float64}}: N-classes by D-dimensions matrix of initial linear coefficients [def: nothing, i.e. zeros]\ninitial_constant::Union{Nothing, Vector{Float64}}: N-classes vector of initial contant terms [def: nothing, i.e. zeros]\nepochs::Int64: Maximum number of epochs, i.e. 
passages trough the whole training sample [def: 1000]\nshuffle::Bool: Whether to randomly shuffle the data at each iteration (epoch) [def: true]\nforce_origin::Bool: Whether to force the parameter associated with the constant term to remain zero [def: false]\nreturn_mean_hyperplane::Bool: Whether to return the average hyperplane coefficients instead of the final ones [def: false]\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/PerceptronClassifier_BetaML/#Example:","page":"PerceptronClassifier","title":"Example:","text":"","category":"section"},{"location":"models/PerceptronClassifier_BetaML/","page":"PerceptronClassifier","title":"PerceptronClassifier","text":"julia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load PerceptronClassifier pkg = \"BetaML\"\n[ Info: For silent loading, specify `verbosity=0`. \nimport BetaML ✔\nBetaML.Perceptron.PerceptronClassifier\n\njulia> model = modelType()\nPerceptronClassifier(\n initial_coefficients = nothing, \n initial_constant = nothing, \n epochs = 1000, \n shuffle = true, \n force_origin = false, \n return_mean_hyperplane = false, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n[ Info: Training machine(PerceptronClassifier(initial_coefficients = nothing, …), …).\n*** Avg. error after epoch 2 : 0.0 (all elements of the set has been correctly classified)\njulia> est_classes = predict(mach, X)\n150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt8, Float64}:\n UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>2.53e-34, virginica=>0.0)\n UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>1.27e-18, virginica=>1.86e-310)\n ⋮\n UnivariateFinite{Multiclass{3}}(setosa=>2.77e-57, versicolor=>1.1099999999999999e-82, virginica=>1.0)\n UnivariateFinite{Multiclass{3}}(setosa=>3.09e-22, versicolor=>4.03e-25, virginica=>1.0)","category":"page"},{"location":"models/ABODDetector_OutlierDetectionPython/#ABODDetector_OutlierDetectionPython","page":"ABODDetector","title":"ABODDetector","text":"","category":"section"},{"location":"models/ABODDetector_OutlierDetectionPython/","page":"ABODDetector","title":"ABODDetector","text":"ABODDetector(n_neighbors = 5,\n method = \"fast\")","category":"page"},{"location":"models/ABODDetector_OutlierDetectionPython/","page":"ABODDetector","title":"ABODDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.abod","category":"page"},{"location":"preparing_data/#Preparing-Data","page":"Preparing Data","title":"Preparing Data","text":"","category":"section"},{"location":"preparing_data/#Splitting-data","page":"Preparing Data","title":"Splitting data","text":"","category":"section"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"MLJ has two tools for splitting data. To split data vertically (that is, to split by observations) use partition. 
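For example, a minimal sketch (the length-100 vector, the 70/30 split, and the integer seed are illustrative choices, not defaults):

using MLJ
y = rand(100)                                    # any vector of observations
train, test = partition(eachindex(y), 0.7, shuffle=true, rng=123)
length(train), length(test)                      # (70, 30)
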
This is commonly applied to a vector of observation indices, but can also be applied to datasets themselves, provided they are vectors, matrices or tables.","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"To split tabular data horizontally (i.e., break up a table based on feature names) use unpack.","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"MLJBase.partition\nMLJBase.unpack","category":"page"},{"location":"preparing_data/#MLJBase.partition","page":"Preparing Data","title":"MLJBase.partition","text":"partition(X, fractions...;\n shuffle=nothing,\n rng=Random.GLOBAL_RNG,\n stratify=nothing,\n multi=false)\n\nSplits the vector, matrix or table X into a tuple of objects of the same type, whose vertical concatenation is X. The number of rows in each component of the return value is determined by the corresponding fractions of length(nrows(X)), where valid fractions are floats between 0 and 1 whose sum is less than one. The last fraction is not provided, as it is inferred from the preceding ones.\n\nFor \"synchronized\" partitioning of multiple objects, use the multi=true option described below.\n\njulia> partition(1:1000, 0.8)\n([1,...,800], [801,...,1000])\n\njulia> partition(1:1000, 0.2, 0.7)\n([1,...,200], [201,...,900], [901,...,1000])\n\njulia> partition(reshape(1:10, 5, 2), 0.2, 0.4)\n([1 6], [2 7; 3 8], [4 9; 5 10])\n\nX, y = make_blobs() # a table and vector\nXtrain, Xtest = partition(X, 0.8, stratify=y)\n\n(Xtrain, Xtest), (ytrain, ytest) = partition((X, y), 0.8, rng=123, multi=true)\n\nKeywords\n\nshuffle=nothing: if set to true, shuffles the rows before taking fractions.\nrng=Random.GLOBAL_RNG: specifies the random number generator to be used, can be an integer seed. If specified, and shuffle === nothing is interpreted as true.\nstratify=nothing: if a vector is specified, the partition will match the stratification of the given vector. In that case, shuffle cannot be false.\nmulti=false: if true then X is expected to be a tuple of objects sharing a common length, which are each partitioned separately using the same specified fractions and the same row shuffling. Returns a tuple of partitions (a tuple of tuples).\n\n\n\n\n\n","category":"function"},{"location":"preparing_data/#MLJBase.unpack","page":"Preparing Data","title":"MLJBase.unpack","text":"unpack(table, f1, f2, ... fk;\n wrap_singles=false,\n shuffle=false,\n rng::Union{AbstractRNG,Int,Nothing}=nothing,\n coerce_options...)\n\nHorizontally split any Tables.jl compatible table into smaller tables or vectors by making column selections determined by the predicates f1, f2, ..., fk. Selection from the column names is without replacement. 
A predicate is any object f such that f(name) is true or false for each column name::Symbol of table.\n\nReturns a tuple of tables/vectors with length one greater than the number of supplied predicates, with the last component including all previously unselected columns.\n\njulia> table = DataFrame(x=[1,2], y=['a', 'b'], z=[10.0, 20.0], w=[\"A\", \"B\"])\n2×4 DataFrame\n Row │ x y z w\n │ Int64 Char Float64 String\n─────┼──────────────────────────────\n 1 │ 1 a 10.0 A\n 2 │ 2 b 20.0 B\n\nZ, XY, W = unpack(table, ==(:z), !=(:w))\njulia> Z\n2-element Vector{Float64}:\n 10.0\n 20.0\n\njulia> XY\n2×2 DataFrame\n Row │ x y\n │ Int64 Char\n─────┼─────────────\n 1 │ 1 a\n 2 │ 2 b\n\njulia> W # the column(s) left over\n2-element Vector{String}:\n \"A\"\n \"B\"\n\nWhenever a returned table contains a single column, it is converted to a vector unless wrap_singles=true.\n\nIf coerce_options are specified then table is first replaced with coerce(table, coerce_options). See ScientificTypes.coerce for details.\n\nIf shuffle=true then the rows of table are first shuffled, using the global RNG, unless rng is specified; if rng is an integer, it specifies the seed of an automatically generated Mersenne twister. If rng is specified then shuffle=true is implicit.\n\n\n\n\n\n","category":"function"},{"location":"preparing_data/#Bridging-the-gap-between-data-type-and-model-requirements","page":"Preparing Data","title":"Bridging the gap between data type and model requirements","text":"","category":"section"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"As outlined in Getting Started, it is important that the scientific type of data matches the requirements of the model of interest. For example, while the majority of supervised learning models require input features to be Continuous, newcomers to MLJ are sometimes surprised at the disappointing results of model queries such as this one:","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"using MLJ","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"X = (height = [185, 153, 163, 114, 180],\n time = [2.3, 4.5, 4.2, 1.8, 7.1],\n mark = [\"D\", \"A\", \"C\", \"B\", \"A\"],\n admitted = [\"yes\", \"no\", missing, \"yes\"]);\ny = [12.4, 12.5, 12.0, 31.9, 43.0]\nmodels(matching(X, y))","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"Or are unsure about the source of the following warning:","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"Tree = @load DecisionTreeRegressor pkg=DecisionTree verbosity=0\ntree = Tree();\njulia> machine(tree, X, y)\n\njulia> machine(tree, X, y)\n┌ Warning: The scitype of `X`, in `machine(model, X, ...)` is incompatible with `model=DecisionTreeRegressor @378`: \n│ scitype(X) = Table{Union{AbstractVector{Continuous}, AbstractVector{Count}, AbstractVector{Textual}, AbstractVector{Union{Missing, Textual}}}}\n│ input_scitype(model) = Table{var\"#s46\"} where var\"#s46\"<:Union{AbstractVector{var\"#s9\"} where var\"#s9\"<:Continuous, AbstractVector{var\"#s9\"} where var\"#s9\"<:Count, AbstractVector{var\"#s9\"} where var\"#s9\"<:OrderedFactor}.\n└ @ MLJBase ~/Dropbox/Julia7/MLJ/MLJBase/src/machines.jl:103\nMachine{DecisionTreeRegressor,…} @198 trained 0 times; caches data\n args: \n 1: Source @628 ⏎ `Table{Union{AbstractVector{Continuous}, AbstractVector{Count}, 
AbstractVector{Textual}, AbstractVector{Union{Missing, Textual}}}}`\n 2: Source @544 ⏎ `AbstractVector{Continuous}`","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"The meaning of the warning is:","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"The input X is a table with column scitypes Continuous, Count, Textual, and Union{Missing, Textual}, which we can also see by inspecting the schema:","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"schema(X)","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"The model requires a table whose column element scitypes each subtype Continuous, Count, or OrderedFactor, so the Textual and Union{Missing, Textual} columns are incompatible.","category":"page"},{"location":"preparing_data/#Common-data-preprocessing-workflows","page":"Preparing Data","title":"Common data preprocessing workflows","text":"","category":"section"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"There are two tools for addressing data-model type mismatches like the above, with links to further documentation given below:","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"Scientific type coercion: We coerce machine types to obtain the intended scientific interpretation. If height in the above example is intended to be Continuous, mark is supposed to be OrderedFactor, and admitted a (binary) Multiclass, then we can do","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"X_coerced = coerce(X, :height=>Continuous, :mark=>OrderedFactor, :admitted=>Multiclass);\nschema(X_coerced)","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"Data transformations: We carry out conventional data transformations, such as missing value imputation and feature encoding:","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"imputer = FillImputer()\nmach = machine(imputer, X_coerced) |> fit!\nX_imputed = transform(mach, X_coerced);\nschema(X_imputed)","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"encoder = ContinuousEncoder()\nmach = machine(encoder, X_imputed) |> fit!\nX_encoded = transform(mach, X_imputed)","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"schema(X_encoded)","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"Such transformations can also be combined in a pipeline; see Linear Pipelines.","category":"page"},{"location":"preparing_data/#Scientific-type-coercion","page":"Preparing Data","title":"Scientific type coercion","text":"","category":"section"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"Scientific type coercion is documented in detail at ScientificTypesBase.jl. 
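For example, a minimal sketch (the column names and values are invented for illustration); coercion can be specified column-by-column, or in bulk by machine/scientific type:

using MLJ

X = (age = [23, 45, 34], gender = [\"male\", \"female\", \"female\"])
schema(X)       # age is Count, gender is Textual

Xfixed = coerce(X, :age => Continuous, :gender => Multiclass)
schema(Xfixed)  # age is Continuous, gender is Multiclass{2}

coerce(X, Count => Continuous, Textual => Multiclass)   # bulk coercion by scitype
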
See also the tutorial at this MLJ Workshop (specifically, here) and this Data Science in Julia tutorial.","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"Also relevant is the section, Working with Categorical Data.","category":"page"},{"location":"preparing_data/#Data-transformation","page":"Preparing Data","title":"Data transformation","text":"","category":"section"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"MLJ's Built-in transformers are documented at Transformers and Other Unsupervised Models. The most relevant in the present context are: ContinuousEncoder, OneHotEncoder, FeatureSelector and FillImputer. A Gaussian mixture model imputer is provided by BetaML, which can be loaded with","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"MissingImputator = @load MissingImputator pkg=BetaML","category":"page"},{"location":"preparing_data/","page":"Preparing Data","title":"Preparing Data","text":"This MLJ Workshop and the \"End-to-end examples\" in Data Science in Julia tutorials give further illustrations of data preprocessing in MLJ.","category":"page"},{"location":"models/AgglomerativeClustering_MLJScikitLearnInterface/#AgglomerativeClustering_MLJScikitLearnInterface","page":"AgglomerativeClustering","title":"AgglomerativeClustering","text":"","category":"section"},{"location":"models/AgglomerativeClustering_MLJScikitLearnInterface/","page":"AgglomerativeClustering","title":"AgglomerativeClustering","text":"AgglomerativeClustering","category":"page"},{"location":"models/AgglomerativeClustering_MLJScikitLearnInterface/","page":"AgglomerativeClustering","title":"AgglomerativeClustering","text":"A model type for constructing an agglomerative clustering model, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/AgglomerativeClustering_MLJScikitLearnInterface/","page":"AgglomerativeClustering","title":"AgglomerativeClustering","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/AgglomerativeClustering_MLJScikitLearnInterface/","page":"AgglomerativeClustering","title":"AgglomerativeClustering","text":"AgglomerativeClustering = @load AgglomerativeClustering pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/AgglomerativeClustering_MLJScikitLearnInterface/","page":"AgglomerativeClustering","title":"AgglomerativeClustering","text":"Do model = AgglomerativeClustering() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in AgglomerativeClustering(n_clusters=...).","category":"page"},{"location":"models/AgglomerativeClustering_MLJScikitLearnInterface/","page":"AgglomerativeClustering","title":"AgglomerativeClustering","text":"Recursively merges the pair of clusters that minimally increases a given linkage distance. Note: there is no predict or transform. Instead, inspect the fitted_params.","category":"page"},{"location":"","page":"Home","title":"Home","text":"\n\n
MLJ\n
      \n\nA Machine Learning Framework for Julia","category":"page"},{"location":"","page":"Home","title":"Home","text":"To support MLJ development, please cite these works or star the repo:","category":"page"},{"location":"","page":"Home","title":"Home","text":"(Image: DOI) (Image: arXiv)","category":"page"},{"location":"","page":"Home","title":"Home","text":"\n Star","category":"page"},{"location":"#[Model-Browser](@ref)","page":"Home","title":"Model Browser","text":"","category":"section"},{"location":"#Reference-Manual","page":"Home","title":"Reference Manual","text":"","category":"section"},{"location":"#Basics","page":"Home","title":"Basics","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Getting Started | Working with Categorical Data | Common MLJ Workflows | Machines | MLJ Cheatsheet ","category":"page"},{"location":"#Data","page":"Home","title":"Data","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Working with Categorical Data | Preparing Data | Generating Synthetic Data | OpenML Integration | Correcting Class Imbalance","category":"page"},{"location":"#Models","page":"Home","title":"Models","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Model Search | Loading Model Code | Transformers and Other Unsupervised Models | More on Probabilistic Predictors | Composing Models | Simple User Defined Models | List of Supported Models | Third Party Packages ","category":"page"},{"location":"#Meta-algorithms","page":"Home","title":"Meta-algorithms","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Evaluating Model Performance | Tuning Models | Controlling Iterative Models | Learning Curves| Correcting Class Imbalance","category":"page"},{"location":"#Composition","page":"Home","title":"Composition","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Composing Models | Linear Pipelines | Target Transformations | Homogeneous Ensembles | Model Stacking | Learning Networks| Correcting Class Imbalance","category":"page"},{"location":"#Integration","page":"Home","title":"Integration","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Logging Workflows | OpenML Integration","category":"page"},{"location":"#Customization-and-Extension","page":"Home","title":"Customization and Extension","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Simple User Defined Models | Quick-Start Guide to Adding Models | Adding Models for General Use | Composing Models | Internals | Modifying Behavior","category":"page"},{"location":"#Miscellaneous","page":"Home","title":"Miscellaneous","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"Weights | Acceleration and Parallelism | Performance Measures ","category":"page"},{"location":"models/SVMNuClassifier_MLJScikitLearnInterface/#SVMNuClassifier_MLJScikitLearnInterface","page":"SVMNuClassifier","title":"SVMNuClassifier","text":"","category":"section"},{"location":"models/SVMNuClassifier_MLJScikitLearnInterface/","page":"SVMNuClassifier","title":"SVMNuClassifier","text":"SVMNuClassifier","category":"page"},{"location":"models/SVMNuClassifier_MLJScikitLearnInterface/","page":"SVMNuClassifier","title":"SVMNuClassifier","text":"A model type for constructing a nu-support vector classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model 
interface.","category":"page"},{"location":"models/SVMNuClassifier_MLJScikitLearnInterface/","page":"SVMNuClassifier","title":"SVMNuClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SVMNuClassifier_MLJScikitLearnInterface/","page":"SVMNuClassifier","title":"SVMNuClassifier","text":"SVMNuClassifier = @load SVMNuClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/SVMNuClassifier_MLJScikitLearnInterface/","page":"SVMNuClassifier","title":"SVMNuClassifier","text":"Do model = SVMNuClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in SVMNuClassifier(nu=...).","category":"page"},{"location":"models/SVMNuClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"SVMNuClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SVMNuClassifier_MLJScikitLearnInterface/","page":"SVMNuClassifier","title":"SVMNuClassifier","text":"nu = 0.5\nkernel = rbf\ndegree = 3\ngamma = scale\ncoef0 = 0.0\nshrinking = true\ntol = 0.001\ncache_size = 200\nmax_iter = -1\ndecision_function_shape = ovr\nrandom_state = nothing","category":"page"},{"location":"models/KernelPCA_MultivariateStats/#KernelPCA_MultivariateStats","page":"KernelPCA","title":"KernelPCA","text":"","category":"section"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"KernelPCA","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"A model type for constructing a kernel prinicipal component analysis model, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"KernelPCA = @load KernelPCA pkg=MultivariateStats","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"Do model = KernelPCA() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in KernelPCA(maxoutdim=...).","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"In kernel PCA the linear operations of ordinary principal component analysis are performed in a reproducing kernel Hilbert space.","category":"page"},{"location":"models/KernelPCA_MultivariateStats/#Training-data","page":"KernelPCA","title":"Training data","text":"","category":"section"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"mach = machine(model, X)","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"Here:","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/KernelPCA_MultivariateStats/#Hyper-parameters","page":"KernelPCA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"maxoutdim=0: Controls the dimension (number of columns) of the output, outdim. Specifically, outdim = min(n, indim, maxoutdim), where n is the number of observations and indim the input dimension.\nkernel::Function=(x,y)->x'y: The kernel function, which takes two vector arguments x and y and returns a scalar value. Defaults to the dot product of x and y.\nsolver::Symbol=:eig: solver to use for the eigenvalues, one of :eig (default, uses LinearAlgebra.eigen) or :eigs (uses Arpack.eigs).\ninverse::Bool=true: perform calculations needed for inverse transform.\nbeta::Real=1.0: strength of the ridge regression that learns the inverse transform when inverse is true.\ntol::Real=0.0: Convergence tolerance for eigenvalue solver.\nmaxiter::Int=300: maximum number of iterations for eigenvalue solver.","category":"page"},{"location":"models/KernelPCA_MultivariateStats/#Operations","page":"KernelPCA","title":"Operations","text":"","category":"section"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"transform(mach, Xnew): Return a lower dimensional projection of the input Xnew, which should have the same scitype as X above.\ninverse_transform(mach, Xsmall): For a dimension-reduced table Xsmall, such as returned by transform, reconstruct a table, having the same number of columns as the original training data X, that transforms to Xsmall. Mathematically, inverse_transform is a right-inverse for the PCA projection map, whose image is orthogonal to the kernel of that map. 
In particular, if Xsmall = transform(mach, Xnew), then inverse_transform(Xsmall) is only an approximation to Xnew.","category":"page"},{"location":"models/KernelPCA_MultivariateStats/#Fitted-parameters","page":"KernelPCA","title":"Fitted parameters","text":"","category":"section"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"projection: Returns the projection matrix, which has size (indim, outdim), where indim and outdim are the number of features of the input and output, respectively.","category":"page"},{"location":"models/KernelPCA_MultivariateStats/#Report","page":"KernelPCA","title":"Report","text":"","category":"section"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"The fields of report(mach) are:","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"indim: Dimension (number of columns) of the training data and new data to be transformed.\noutdim: Dimension of transformed data.\nprincipalvars: The variance of the principal components.","category":"page"},{"location":"models/KernelPCA_MultivariateStats/#Examples","page":"KernelPCA","title":"Examples","text":"","category":"section"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"using MLJ\nusing LinearAlgebra\n\nKernelPCA = @load KernelPCA pkg=MultivariateStats\n\nX, y = @load_iris ## a table and a vector\n\n## Gaussian (RBF) kernel with the given length scale:\nfunction rbf_kernel(length_scale)\n return (x,y) -> exp(-norm(x-y)^2 / (2 * length_scale^2))\nend\n\nmodel = KernelPCA(maxoutdim=2, kernel=rbf_kernel(1))\nmach = machine(model, X) |> fit!\n\nXproj = transform(mach, X)","category":"page"},{"location":"models/KernelPCA_MultivariateStats/","page":"KernelPCA","title":"KernelPCA","text":"See also PCA, ICA, FactorAnalysis, PPCA","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/#StableRulesClassifier_SIRUS","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"","category":"section"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"StableRulesClassifier","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"A model type for constructing a stable rules classifier, based on SIRUS.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"StableRulesClassifier = @load StableRulesClassifier pkg=SIRUS","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"Do model = StableRulesClassifier() to construct an instance with default hyper-parameters. 
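For a quick feel for the interface, a minimal sketch (the synthetic binary data, the StableRNG seeds, and max_rules=8 are illustrative choices, not recommendations):

using MLJ
using StableRNGs

StableRulesClassifier = @load StableRulesClassifier pkg=SIRUS

X, y = make_blobs(200, 4; centers=2, rng=StableRNG(1))   # toy binary classification data
model = StableRulesClassifier(rng=StableRNG(1), max_rules=8)
mach = machine(model, X, y) |> fit!
predict(mach, X)                                         # probabilistic predictions
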
Provide keyword arguments to override hyper-parameter defaults, as in StableRulesClassifier(rng=...).","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"StableRulesClassifier implements the explainable rule-based model based on a random forest.","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/#Training-data","page":"StableRulesClassifier","title":"Training data","text":"","category":"section"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"where","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/#Hyperparameters","page":"StableRulesClassifier","title":"Hyperparameters","text":"","category":"section"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"rng::AbstractRNG=default_rng(): Random number generator. Using a StableRNG from StableRNGs.jl is advised.\npartial_sampling::Float64=0.7: Ratio of samples to use in each subset of the data. The default should be fine for most cases.\nn_trees::Int=1000: The number of trees to use. It is advisable to use at least a thousand trees for better rule selection and, in turn, better predictive performance.\nmax_depth::Int=2: The depth of the tree. A lower depth decreases model complexity and can therefore improve accuracy when the sample size is small (reducing overfitting).\nq::Int=10: Number of cutpoints to use per feature. The default value should be fine for most situations.\nmin_data_in_leaf::Int=5: Minimum number of data points per leaf.\nmax_rules::Int=10: This is the most important hyperparameter after lambda. The more rules, the more accurate the model should be. If this is not the case, tune lambda first. However, more rules will also decrease model interpretability. So, it is important to find a good balance here. In most cases, 10 to 40 rules should provide reasonable accuracy while remaining interpretable.\nlambda::Float64=1.0: The weights of the final rules are determined via a regularized regression over each rule as a binary feature. This hyperparameter specifies the strength of the ridge (L2) regularizer. SIRUS is very sensitive to the choice of this hyperparameter. Ensure that you try the full range from 10^-4 to 10^4 (e.g., 0.001, 0.01, ..., 100). When trying the range, one good check is to verify that an increase in max_rules increases performance. 
If this is not the case, then try a different value for lambda.","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/#Fitted-parameters","page":"StableRulesClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"fitresult: A StableRules object.","category":"page"},{"location":"models/StableRulesClassifier_SIRUS/#Operations","page":"StableRulesClassifier","title":"Operations","text":"","category":"section"},{"location":"models/StableRulesClassifier_SIRUS/","page":"StableRulesClassifier","title":"StableRulesClassifier","text":"predict(mach, Xnew): Return a vector of predictions for each row of Xnew.","category":"page"},{"location":"quick_start_guide_to_adding_models/#Quick-Start-Guide-to-Adding-Models","page":"Quick-Start Guide to Adding Models","title":"Quick-Start Guide to Adding Models","text":"","category":"section"},{"location":"quick_start_guide_to_adding_models/","page":"Quick-Start Guide to Adding Models","title":"Quick-Start Guide to Adding Models","text":"This guide has moved to this section of the MLJModelInterface.jl documentation.","category":"page"},{"location":"quick_start_guide_to_adding_models/","page":"Quick-Start Guide to Adding Models","title":"Quick-Start Guide to Adding Models","text":"For quick-and-dirty user-defined models, not intended for registering with the MLJ Model Registry, see Simple User Defined Models. ","category":"page"},{"location":"target_transformations/#Target-Transformations","page":"Target Transformations","title":"Target Transformations","text":"","category":"section"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"Some supervised models work best if the target variable has been standardized, i.e., rescaled to have zero mean and unit variance. Such a target transformation is learned from the values of the training target variable. In particular, one generally learns a different transformation when training on a proper subset of the training data. 
Good data hygiene prescribes that a new transformation should be computed each time the supervised model is trained on new data - for example in cross-validation.","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"Additionally, one generally wants to inverse transform the predictions of the supervised model for the final target predictions to be on the original scale.","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"All these concerns are addressed by wrapping the supervised model using TransformedTargetModel:","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"using MLJ\nMLJ.color_off()","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"Ridge = @load RidgeRegressor pkg=MLJLinearModels verbosity=0\nridge = Ridge(fit_intercept=false)\nridge2 = TransformedTargetModel(ridge, transformer=Standardizer())","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"Note that all the original hyperparameters, as well as those of the Standardizer, are accessible as nested hyper-parameters of the wrapped model, which can be trained or evaluated like any other:","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"X, y = make_regression(rng=1234, intercept=false)\ny = y*10^5\nmach = machine(ridge2, X, y)\nfit!(mach, rows=1:60, verbosity=0)\npredict(mach, rows=61:62)","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"Training and predicting using ridge2 as above means:","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"Standardizing the target y using the first 60 rows to get a new target z\nTraining the original ridge model using the first 60 rows of X and z\nCalling predict on the machine trained in Step 2 on rows 61:62 of X\nApplying the inverse scaling learned in Step 1 to those predictions (to get the final output shown above)","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"Since both ridge and ridge2 return predictions on the original scale, we can meaningfully compare the corresponding mean absolute errors, which are indeed different in this case.","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"evaluate(ridge, X, y, measure=l1)","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"evaluate(ridge2, X, y, measure=l1)","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"Ordinary functions can also be used in target transformations but an inverse must be explicitly specified:","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"ridge3 = TransformedTargetModel(ridge, transformer=y->log.(y), inverse=z->exp.(z))\nX, y = @load_boston\nevaluate(ridge3, X, y, 
measure=l1)","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"Without the log transform (ie, using ridge) we get the poorer mean absolute error, l1, of 3.9.","category":"page"},{"location":"target_transformations/","page":"Target Transformations","title":"Target Transformations","text":"TransformedTargetModel","category":"page"},{"location":"target_transformations/#MLJBase.TransformedTargetModel","page":"Target Transformations","title":"MLJBase.TransformedTargetModel","text":"TransformedTargetModel(model; transformer=nothing, inverse=nothing, cache=true)\n\nWrap the supervised or semi-supervised model in a transformation of the target variable.\n\nHere transformer one of the following:\n\nThe Unsupervised model that is to transform the training target. By default (inverse=nothing) the parameters learned by this transformer are also used to inverse-transform the predictions of model, which means transformer must implement the inverse_transform method. If this is not the case, specify inverse=identity to suppress inversion.\nA callable object for transforming the target, such as y -> log.(y). In this case a callable inverse, such as z -> exp.(z), should be specified.\n\nSpecify cache=false to prioritize memory over speed, or to guarantee data anonymity.\n\nSpecify inverse=identity if model is a probabilistic predictor, as inverse-transforming sample spaces is not supported. Alternatively, replace model with a deterministic model, such as Pipeline(model, y -> mode.(y)).\n\nExamples\n\nA model that normalizes the target before applying ridge regression, with predictions returned on the original scale:\n\n@load RidgeRegressor pkg=MLJLinearModels\nmodel = RidgeRegressor()\ntmodel = TransformedTargetModel(model, transformer=Standardizer())\n\nA model that applies a static log transformation to the data, again returning predictions to the original scale:\n\ntmodel2 = TransformedTargetModel(model, transformer=y->log.(y), inverse=z->exp.(y))\n\n\n\n\n\n","category":"function"},{"location":"models/SVMClassifier_MLJScikitLearnInterface/#SVMClassifier_MLJScikitLearnInterface","page":"SVMClassifier","title":"SVMClassifier","text":"","category":"section"},{"location":"models/SVMClassifier_MLJScikitLearnInterface/","page":"SVMClassifier","title":"SVMClassifier","text":"SVMClassifier","category":"page"},{"location":"models/SVMClassifier_MLJScikitLearnInterface/","page":"SVMClassifier","title":"SVMClassifier","text":"A model type for constructing a C-support vector classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SVMClassifier_MLJScikitLearnInterface/","page":"SVMClassifier","title":"SVMClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SVMClassifier_MLJScikitLearnInterface/","page":"SVMClassifier","title":"SVMClassifier","text":"SVMClassifier = @load SVMClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/SVMClassifier_MLJScikitLearnInterface/","page":"SVMClassifier","title":"SVMClassifier","text":"Do model = SVMClassifier() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in SVMClassifier(C=...).","category":"page"},{"location":"models/SVMClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"SVMClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SVMClassifier_MLJScikitLearnInterface/","page":"SVMClassifier","title":"SVMClassifier","text":"C = 1.0\nkernel = rbf\ndegree = 3\ngamma = scale\ncoef0 = 0.0\nshrinking = true\ntol = 0.001\ncache_size = 200\nmax_iter = -1\ndecision_function_shape = ovr\nrandom_state = nothing","category":"page"},{"location":"models/PCADetector_OutlierDetectionPython/#PCADetector_OutlierDetectionPython","page":"PCADetector","title":"PCADetector","text":"","category":"section"},{"location":"models/PCADetector_OutlierDetectionPython/","page":"PCADetector","title":"PCADetector","text":"PCADetector(n_components = nothing,\n n_selected_components = nothing,\n copy = true,\n whiten = false,\n svd_solver = \"auto\",\n tol = 0.0\n iterated_power = \"auto\",\n standardization = true,\n weighted = true,\n random_state = nothing)","category":"page"},{"location":"models/PCADetector_OutlierDetectionPython/","page":"PCADetector","title":"PCADetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.pca","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/#RandomForestClassifier_DecisionTree","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"","category":"section"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"RandomForestClassifier","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"A model type for constructing a CART random forest classifier, based on DecisionTree.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"RandomForestClassifier = @load RandomForestClassifier pkg=DecisionTree","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"Do model = RandomForestClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in RandomForestClassifier(max_depth=...).","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"RandomForestClassifier implements the standard Random Forest algorithm, originally published in Breiman, L. (2001): \"Random Forests.\", Machine Learning, vol. 45, pp. 
5–32.","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/#Training-data","page":"RandomForestClassifier","title":"Training data","text":"","category":"section"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"where","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"X: any table of input features (eg, a DataFrame) whose columns each have one of the following element scitypes: Continuous, Count, or <:OrderedFactor; check column scitypes with schema(X)\ny: the target, which can be any AbstractVector whose element scitype is <:OrderedFactor or <:Multiclass; check the scitype with scitype(y)","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"Train the machine with fit!(mach, rows=...).","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/#Hyperparameters","page":"RandomForestClassifier","title":"Hyperparameters","text":"","category":"section"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"max_depth=-1: max depth of the decision tree (-1=any)\nmin_samples_leaf=1: min number of samples each leaf needs to have\nmin_samples_split=2: min number of samples needed for a split\nmin_purity_increase=0: min purity needed for a split\nn_subfeatures=-1: number of features to select at random (0 for all, -1 for square root of number of features)\nn_trees=10: number of trees to train\nsampling_fraction=0.7 fraction of samples to train each tree on\nfeature_importance: method to use for computing feature importances. One of (:impurity, :split)\nrng=Random.GLOBAL_RNG: random number generator or seed","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/#Operations","page":"RandomForestClassifier","title":"Operations","text":"","category":"section"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"predict(mach, Xnew): return predictions of the target given features Xnew having the same scitype as X above. 
Predictions are probabilistic, but uncalibrated.\npredict_mode(mach, Xnew): instead return the mode of each prediction above.","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/#Fitted-parameters","page":"RandomForestClassifier","title":"Fitted parameters","text":"","category":"section"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"forest: the Ensemble object returned by the core DecisionTree.jl algorithm","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/#Report","page":"RandomForestClassifier","title":"Report","text":"","category":"section"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"features: the names of the features encountered in training","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/#Accessor-functions","page":"RandomForestClassifier","title":"Accessor functions","text":"","category":"section"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"feature_importances(mach) returns a vector of (feature::Symbol => importance) pairs; the type of importance is determined by the hyperparameter feature_importance (see above)","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/#Examples","page":"RandomForestClassifier","title":"Examples","text":"","category":"section"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"using MLJ\nForest = @load RandomForestClassifier pkg=DecisionTree\nforest = Forest(min_samples_split=6, n_subfeatures=3)\n\nX, y = @load_iris\nmach = machine(forest, X, y) |> fit!\n\nXnew = (sepal_length = [6.4, 7.2, 7.4],\n sepal_width = [2.8, 3.0, 2.8],\n petal_length = [5.6, 5.8, 6.1],\n petal_width = [2.1, 1.6, 1.9],)\nyhat = predict(mach, Xnew) ## probabilistic predictions\npredict_mode(mach, Xnew) ## point predictions\npdf.(yhat, \"virginica\") ## probabilities for the \"virginica\" class\n\nfitted_params(mach).forest ## raw `Ensemble` object from DecisionTree.jl\n\nfeature_importances(mach) ## `:impurity` feature importances\nforest.feature_importance = :split\nfeature_importances(mach) ## `:split` feature importances\n","category":"page"},{"location":"models/RandomForestClassifier_DecisionTree/","page":"RandomForestClassifier","title":"RandomForestClassifier","text":"See also DecisionTree.jl and the unwrapped model type MLJDecisionTreeInterface.DecisionTree.RandomForestClassifier.","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/#LADRegressor_MLJLinearModels","page":"LADRegressor","title":"LADRegressor","text":"","category":"section"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"LADRegressor","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"A model type for constructing a LAD regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"From MLJ, the type can be 
imported using","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"LADRegressor = @load LADRegressor pkg=MLJLinearModels","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"Do model = LADRegressor() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"Least absolute deviation regression is a linear model with objective function","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"$","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"∑ρ(Xθ - y) + n⋅λ|θ|₂² + n⋅γ|θ|₁ $","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"where ρ is the absolute loss and n is the number of observations.","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"If scale_penalty_with_samples = false the objective function is instead","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"$","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"∑ρ(Xθ - y) + λ|θ|₂² + γ|θ|₁ $","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":".","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"Different solver options exist, as indicated under \"Hyperparameters\" below. 
","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/#Training-data","page":"LADRegressor","title":"Training data","text":"","category":"section"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"where:","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/#Hyperparameters","page":"LADRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"See also RobustRegressor.","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/#Parameters","page":"LADRegressor","title":"Parameters","text":"","category":"section"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"lambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. Default: 1.0\ngamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0\npenalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2\nfit_intercept::Bool: whether to fit the intercept or not. Default: true\npenalize_intercept::Bool: whether to penalize the intercept. Default: false\nscale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, IWLSCG, if penalty = :l2, and ProxGrad otherwise.\nIf solver = nothing (default) then LBFGS() is used, if penalty = :l2, and otherwise ProxGrad(accel=true) (FISTA) is used.\nSolver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) 
Default: nothing","category":"page"},{"location":"models/LADRegressor_MLJLinearModels/#Example","page":"LADRegressor","title":"Example","text":"","category":"section"},{"location":"models/LADRegressor_MLJLinearModels/","page":"LADRegressor","title":"LADRegressor","text":"using MLJ\nX, y = make_regression()\nmach = fit!(machine(LADRegressor(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/#RidgeRegressor_MLJLinearModels","page":"RidgeRegressor","title":"RidgeRegressor","text":"","category":"section"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"RidgeRegressor","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"A model type for constructing a ridge regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"RidgeRegressor = @load RidgeRegressor pkg=MLJLinearModels","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"Do model = RidgeRegressor() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"Ridge regression is a linear model with objective function","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"$","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"|Xθ - y|₂²/2 + n⋅λ|θ|₂²/2 $","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"where n is the number of observations.","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"If scale_penalty_with_samples = false then the objective function is instead","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"$","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"|Xθ - y|₂²/2 + λ|θ|₂²/2 $","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":".","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"Different solver options exist, as indicated under \"Hyperparameters\" below. 
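For example, a minimal sketch contrasting the two objective variants just described (the value of lambda is hypothetical):

using MLJ
RidgeRegressor = @load RidgeRegressor pkg=MLJLinearModels
scaled = RidgeRegressor(lambda=0.5)                                      ## penalty multiplied by n
unscaled = RidgeRegressor(lambda=0.5, scale_penalty_with_samples=false)  ## penalty left unscaled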
","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/#Training-data","page":"RidgeRegressor","title":"Training data","text":"","category":"section"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"where:","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/#Hyperparameters","page":"RidgeRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"lambda::Real: strength of the L2 regularization. Default: 1.0\nfit_intercept::Bool: whether to fit the intercept or not. Default: true\npenalize_intercept::Bool: whether to penalize the intercept. Default: false\nscale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: any instance of MLJLinearModels.Analytical. Use Analytical() for Cholesky and CG()=Analytical(iterative=true) for conjugate-gradient. If solver = nothing (default) then Analytical() is used. 
Default: nothing","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/#Example","page":"RidgeRegressor","title":"Example","text":"","category":"section"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"using MLJ\nX, y = make_regression()\nmach = fit!(machine(RidgeRegressor(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"},{"location":"models/RidgeRegressor_MLJLinearModels/","page":"RidgeRegressor","title":"RidgeRegressor","text":"See also ElasticNetRegressor.","category":"page"},{"location":"models/KNNDetector_OutlierDetectionPython/#KNNDetector_OutlierDetectionPython","page":"KNNDetector","title":"KNNDetector","text":"","category":"section"},{"location":"models/KNNDetector_OutlierDetectionPython/","page":"KNNDetector","title":"KNNDetector","text":"KNNDetector(n_neighbors = 5,\n method = \"largest\",\n radius = 1.0,\n algorithm = \"auto\",\n leaf_size = 30,\n metric = \"minkowski\",\n p = 2,\n metric_params = nothing,\n n_jobs = 1)","category":"page"},{"location":"models/KNNDetector_OutlierDetectionPython/","page":"KNNDetector","title":"KNNDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.knn","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/#LinearRegressor_MultivariateStats","page":"LinearRegressor","title":"LinearRegressor","text":"","category":"section"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"A model type for constructing a linear regressor, based on MultivariateStats.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor = @load LinearRegressor pkg=MultivariateStats","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"Do model = LinearRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearRegressor(bias=...).","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor assumes the target is a Continuous variable and trains a linear prediction function using the least squares algorithm. 
Options exist to specify a bias term.","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/#Training-data","page":"LinearRegressor","title":"Training data","text":"","category":"section"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"Here:","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check the column scitypes with schema(X).\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y).","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/#Hyper-parameters","page":"LinearRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"bias=true: Include the bias term if true, otherwise fit without bias term.","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/#Operations","page":"LinearRegressor","title":"Operations","text":"","category":"section"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"predict(mach, Xnew): Return predictions of the target given new features Xnew, which should have the same scitype as X above.","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/#Fitted-parameters","page":"LinearRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"coefficients: The linear coefficients determined by the model.\nintercept: The intercept determined by the model.","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/#Examples","page":"LinearRegressor","title":"Examples","text":"","category":"section"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"using MLJ\n\nLinearRegressor = @load LinearRegressor pkg=MultivariateStats\nlinear_regressor = LinearRegressor()\n\nX, y = make_regression(100, 2) ## a table and a vector (synthetic data)\nmach = machine(linear_regressor, X, y) |> fit!\n\nXnew, _ = make_regression(3, 2)\nyhat = predict(mach, Xnew) ## new predictions","category":"page"},{"location":"models/LinearRegressor_MultivariateStats/","page":"LinearRegressor","title":"LinearRegressor","text":"See also MultitargetLinearRegressor, RidgeRegressor, 
MultitargetRidgeRegressor","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/#QuantileRegressor_MLJLinearModels","page":"QuantileRegressor","title":"QuantileRegressor","text":"","category":"section"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"QuantileRegressor","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"A model type for constructing a quantile regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"QuantileRegressor = @load QuantileRegressor pkg=MLJLinearModels","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"Do model = QuantileRegressor() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"This model coincides with RobustRegressor, with the exception that the robust loss, rho, is fixed to QuantileRho(delta), where delta is a new hyperparameter.","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"Different solver options exist, as indicated under \"Hyperparameters\" below. ","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/#Training-data","page":"QuantileRegressor","title":"Training data","text":"","category":"section"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"where:","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/#Hyperparameters","page":"QuantileRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"delta::Real: parameterizes the QuantileRho function (indicating the quantile to use with default 0.5 for the median regression) Default: 0.5\nlambda::Real: strength of the regularizer if penalty is :l2 or :l1. Strength of the L2 regularizer if penalty is :en. 
Default: 1.0\ngamma::Real: strength of the L1 regularizer if penalty is :en. Default: 0.0\npenalty::Union{String, Symbol}: the penalty to use, either :l2, :l1, :en (elastic net) or :none. Default: :l2\nfit_intercept::Bool: whether to fit the intercept or not. Default: true\npenalize_intercept::Bool: whether to penalize the intercept. Default: false\nscale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: some instance of MLJLinearModels.S where S is one of: LBFGS, IWLSCG, if penalty = :l2, and ProxGrad otherwise.\nIf solver = nothing (default) then LBFGS() is used, if penalty = :l2, and otherwise ProxGrad(accel=true) (FISTA) is used.\nSolver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...) Default: nothing","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/#Example","page":"QuantileRegressor","title":"Example","text":"","category":"section"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"using MLJ\nX, y = make_regression()\nmach = fit!(machine(QuantileRegressor(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"},{"location":"models/QuantileRegressor_MLJLinearModels/","page":"QuantileRegressor","title":"QuantileRegressor","text":"See also RobustRegressor, HuberRegressor.","category":"page"},{"location":"mlj_cheatsheet/#MLJ-Cheatsheet","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"","category":"section"},{"location":"mlj_cheatsheet/#Starting-an-interactive-MLJ-session","page":"MLJ Cheatsheet","title":"Starting an interactive MLJ session","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"using MLJ\nMLJ_VERSION # version of MLJ for this cheatsheet","category":"page"},{"location":"mlj_cheatsheet/#Model-search-and-code-loading","page":"MLJ Cheatsheet","title":"Model search and code loading","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"info(\"PCA\") retrieves registry metadata for the model called \"PCA\"","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"info(\"RidgeRegressor\", pkg=\"MultivariateStats\") retrieves metadata for \"RidgeRegressor\", which is provided by multiple packages","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"doc(\"DecisionTreeClassifier\", pkg=\"DecisionTree\") retrieves the model document string for the classifier, without loading model code","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"models() lists metadata of every registered model.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"models(\"Tree\") lists models with \"Tree\" in the model or package name.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"models(x -> x.is_supervised && x.is_pure_julia) lists all supervised models written in pure Julia.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"models(matching(X)) lists all unsupervised models compatible with input X.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ 
Cheatsheet","title":"MLJ Cheatsheet","text":"models(matching(X, y)) lists all supervised models compatible with input/target X/y.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"With additional conditions:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"models() do model\n matching(model, X, y) &&\n model.prediction_type == :probabilistic &&\n model.is_pure_julia\nend","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Tree = @load DecisionTreeClassifier pkg=DecisionTree imports \"DecisionTreeClassifier\" type and binds it to Tree tree = Tree() to instantiate a Tree. ","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"tree2 = Tree(max_depth=2) instantiates a tree with different hyperparameter","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Ridge = @load RidgeRegressor pkg=MultivariateStats imports a type for a model provided by multiple packages","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"For interactive loading instead, use @iload","category":"page"},{"location":"mlj_cheatsheet/#Scitypes-and-coercion","page":"MLJ Cheatsheet","title":"Scitypes and coercion","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"scitype(x) is the scientific type of x. For example scitype(2.4) == Continuous","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"(Image: scitypes_small.png)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"type scitype\nAbstractFloat Continuous\nInteger Count\nCategoricalValue and CategoricalString Multiclass or OrderedFactor\nAbstractString Textual","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Figure and Table for common scalar scitypes","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Use schema(X) to get the column scitypes of a table X","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"coerce(y, Multiclass) attempts coercion of all elements of y into scitype Multiclass","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"coerce(X, :x1 => Continuous, :x2 => OrderedFactor) to coerce columns :x1 and :x2 of table X.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"coerce(X, Count => Continuous) to coerce all columns with Count scitype to Continuous.","category":"page"},{"location":"mlj_cheatsheet/#Ingesting-data","page":"MLJ Cheatsheet","title":"Ingesting data","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Split the table channing into target y (the :Exit column) and features X (everything else), after a seeded row shuffling:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"using RDatasets\nchanning = dataset(\"boot\", \"channing\")\ny, X = unpack(channing, ==(:Exit); rng=123)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ 
Cheatsheet","title":"MLJ Cheatsheet","text":"Same as above but exclude :Time column from X:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"using RDatasets\nchanning = dataset(\"boot\", \"channing\")\ny, X = unpack(channing,\n ==(:Exit), # y is the :Exit column\n !=(:Time); # X is the rest, except :Time\n rng=123)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Splitting row indices into train/validation/test, with seeded shuffling:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"train, valid, test = partition(eachindex(y), 0.7, 0.2, rng=1234) for 70:20:10 ratio","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"For a stratified split:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"train, test = partition(eachindex(y), 0.8, stratify=y)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Split a table or matrix X, instead of indices:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Xtrain, Xvalid, Xtest = partition(X, 0.5, 0.3, rng=123) ","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Getting data from OpenML:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"table = OpenML.load(91)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Creating synthetic classification data:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"X, y = make_blobs(100, 2) (also: make_moons, make_circles)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Creating synthetic regression data:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"X, y = make_regression(100, 2)","category":"page"},{"location":"mlj_cheatsheet/#Machine-construction","page":"MLJ Cheatsheet","title":"Machine construction","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Supervised case:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"model = KNNRegressor(K=1) and mach = machine(model, X, y)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Unsupervised case:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"model = OneHotEncoder() and mach = machine(model, X)","category":"page"},{"location":"mlj_cheatsheet/#Fitting","page":"MLJ Cheatsheet","title":"Fitting","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"fit!(mach, rows=1:100, verbosity=1, force=false) (defaults shown)","category":"page"},{"location":"mlj_cheatsheet/#Prediction","page":"MLJ Cheatsheet","title":"Prediction","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Supervised case: predict(mach, Xnew) or predict(mach, 
rows=1:100)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Similarly, for probabilistic models: predict_mode, predict_mean and predict_median.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Unsupervised case: transform(mach, rows=1:100) or inverse_transform(mach, rows), etc.","category":"page"},{"location":"mlj_cheatsheet/#Inspecting-objects","page":"MLJ Cheatsheet","title":"Inspecting objects","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"@more gets detail on the last object in the REPL","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"params(model) gets a nested-tuple of all hyperparameters, even nested ones","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"info(ConstantRegressor()), info(\"PCA\"), info(\"RidgeRegressor\", pkg=\"MultivariateStats\") gets all properties (aka traits) of registered models","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"info(rms) gets all properties of a performance measure","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"schema(X) gets the column names, types and scitypes, and nrows, of a table X","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"scitype(X) gets the scientific type of X","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"fitted_params(mach) gets learned parameters of the fitted machine","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"report(mach) gets other training results (e.g. 
feature rankings)","category":"page"},{"location":"mlj_cheatsheet/#Saving-and-retrieving-machines-using-Julia-serializer","page":"MLJ Cheatsheet","title":"Saving and retrieving machines using Julia serializer","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"MLJ.save(\"trained_for_five_days.jls\", mach) to save machine mach (without data)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"predict_only_mach = machine(\"trained_for_five_days.jls\") to deserialize.","category":"page"},{"location":"mlj_cheatsheet/#Performance-estimation","page":"MLJ Cheatsheet","title":"Performance estimation","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"evaluate(model, X, y, resampling=CV(), measure=rms, operation=predict, weights=..., verbosity=1)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"evaluate!(mach, resampling=Holdout(), measure=[rms, mav], operation=predict, weights=..., verbosity=1)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"evaluate!(mach, resampling=[(fold1, fold2), (fold2, fold1)], measure=rms)","category":"page"},{"location":"mlj_cheatsheet/#Resampling-strategies-(resampling...)","page":"MLJ Cheatsheet","title":"Resampling strategies (resampling=...)","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Holdout(fraction_train=0.7, rng=1234) for simple holdout","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"CV(nfolds=6, rng=1234) for cross-validation","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"StratifiedCV(nfolds=6, rng=1234) for stratified cross-validation","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"TimeSeriesCV(nfolds=4) for time-series cross-validation","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"or a list of pairs of row indices:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"[(train1, eval1), (train2, eval2), ... 
(traink, evalk)]","category":"page"},{"location":"mlj_cheatsheet/#Tuning","page":"MLJ Cheatsheet","title":"Tuning","text":"","category":"section"},{"location":"mlj_cheatsheet/#Tuning-model-wrapper","page":"MLJ Cheatsheet","title":"Tuning model wrapper","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"tuned_model = TunedModel(model=…, tuning=RandomSearch(), resampling=Holdout(), measure=…, operation=predict, range=…)","category":"page"},{"location":"mlj_cheatsheet/#Ranges-for-tuning-(range...)","page":"MLJ Cheatsheet","title":"Ranges for tuning (range=...)","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"If r = range(KNNRegressor(), :K, lower=1, upper = 20, scale=:log)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"then Grid() search uses iterator(r, 6) == [1, 2, 3, 6, 11, 20].","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"lower=-Inf and upper=Inf are allowed.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Non-numeric ranges: r = range(model, :parameter, values=…)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Nested ranges: Use dot syntax, as in r = range(EnsembleModel(atom=tree), :(atom.max_depth), ...)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Can specify multiple ranges, as in range=[r1, r2, r3]. For more range options do ?Grid or ?RandomSearch","category":"page"},{"location":"mlj_cheatsheet/#Tuning-strategies","page":"MLJ Cheatsheet","title":"Tuning strategies","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"RandomSearch(rng=1234) for basic random search","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Grid(resolution=10) or Grid(goal=50) for basic grid search","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Also available: LatinHyperCube, Explicit (built-in), MLJTreeParzenTuning, ParticleSwarm, AdaptiveParticleSwarm (3rd-party packages)","category":"page"},{"location":"mlj_cheatsheet/#Learning-curves","page":"MLJ Cheatsheet","title":"Learning curves","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"For generating a plot of performance against parameter specified by range:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"curve = learning_curve(mach, resolution=30, resampling=Holdout(), measure=…, operation=predict, range=…, n=1)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"curve = learning_curve(model, X, y, resolution=30, resampling=Holdout(), measure=…, operation=predict, range=…, n=1)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"If using Plots.jl:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"plot(curve.parameter_values, curve.measurements, xlab=curve.parameter_name, 
xscale=curve.parameter_scale)","category":"page"},{"location":"mlj_cheatsheet/#Controlling-iterative-models","page":"MLJ Cheatsheet","title":"Controlling iterative models","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Requires: using MLJIteration","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"iterated_model = IteratedModel(model=…, resampling=Holdout(), measure=…, controls=…, retrain=false)","category":"page"},{"location":"mlj_cheatsheet/#Controls","page":"MLJ Cheatsheet","title":"Controls","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Increment training: Step(n=1)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Stopping: TimeLimit(t=0.5) (in hours), NumberLimit(n=100), NumberSinceBest(n=6), NotANumber(), Threshold(value=0.0), GL(alpha=2.0), PQ(alpha=0.75, k=5), Patience(n=5)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Logging: Info(f=identity), Warn(f=\"\"), Error(predicate, f=\"\")","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Callbacks: Callback(f=mach->nothing), WithNumberDo(f=n->@info(n)), WithIterationsDo(f=i->@info(\"num iterations: $i\")), WithLossDo(f=x->@info(\"loss: $x\")), WithTrainingLossesDo(f=v->@info(v))","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Snapshots: Save(filename=\"machine.jlso\")","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Wraps: MLJIteration.skip(control, predicate=1), IterationControl.with_state_do(control)","category":"page"},{"location":"mlj_cheatsheet/#Performance-measures-(metrics)","page":"MLJ Cheatsheet","title":"Performance measures (metrics)","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Do measures() to get full list.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"info(rms) to list properties (aka traits) of the rms measure","category":"page"},{"location":"mlj_cheatsheet/#Transformers","page":"MLJ Cheatsheet","title":"Transformers","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Built-ins include: Standardizer, OneHotEncoder, UnivariateBoxCoxTransformer, FeatureSelector, FillImputer, UnivariateDiscretizer, ContinuousEncoder, UnivariateTimeTypeToContinuous","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Externals include: PCA (in MultivariateStats), KMeans, KMedoids (in Clustering).","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"models(m -> !m.is_supervised) to get full list","category":"page"},{"location":"mlj_cheatsheet/#Ensemble-model-wrapper","page":"MLJ Cheatsheet","title":"Ensemble model wrapper","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"EnsembleModel(atom=…, weights=Float64[], bagging_fraction=0.8, rng=GLOBAL_RNG, n=100, parallel=true, out_of_bag_measure=[])","category":"page"},{"location":"mlj_cheatsheet/#Target-transformation-wrapper","page":"MLJ 
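As an illustration of the ensemble wrapper signature above, a minimal sketch of bagging a decision tree classifier (assuming the DecisionTree.jl interface package is installed; the hyper-parameter values are hypothetical):

using MLJ
Tree = @load DecisionTreeClassifier pkg=DecisionTree
forest = EnsembleModel(atom=Tree(), n=100, bagging_fraction=0.8)  ## keyword names follow the signature above
X, y = make_blobs(100, 2)
evaluate(forest, X, y, resampling=CV(nfolds=6), measure=log_loss)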
Cheatsheet","title":"Target transformation wrapper","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"TransformedTargetModel(model=ConstantClassifier(), target=Standardizer())","category":"page"},{"location":"mlj_cheatsheet/#Pipelines","page":"MLJ Cheatsheet","title":"Pipelines","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"pipe = (X -> coerce(X, :height=>Continuous)) |> OneHotEncoder |> KNNRegressor(K=3) ","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Unsupervised:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"pipe = Standardizer |> OneHotEncoder","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Concatenation:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"pipe1 |> pipe2 or model |> pipe or pipe |> model, etc","category":"page"},{"location":"mlj_cheatsheet/#Define-a-supervised-learning-network:","page":"MLJ Cheatsheet","title":"Define a supervised learning network:","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Xs = source(X) ys = source(y)","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"... define further nodal machines and nodes ...","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"yhat = predict(knn_machine, W, ys) (final node)","category":"page"},{"location":"mlj_cheatsheet/#Exporting-a-learning-network-as-a-stand-alone-model:","page":"MLJ Cheatsheet","title":"Exporting a learning network as a stand-alone model:","text":"","category":"section"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Supervised, with final node yhat returning point predictions:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"@from_network machine(Deterministic(), Xs, ys; predict=yhat) begin\n mutable struct Composite\n\t reducer=network_pca\n\t\tregressor=network_knn\n end","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Here network_pca and network_knn are models appearing in the learning network.","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Supervised, with yhat final node returning probabilistic predictions:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"@from_network machine(Probabilistic(), Xs, ys; predict=yhat) begin\n mutable struct Composite\n reducer=network_pca\n classifier=network_tree\n end","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"Unsupervised, with final node Xout:","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ Cheatsheet","text":"@from_network machine(Unsupervised(), Xs; transform=Xout) begin\n mutable struct Composite\n\t reducer1=network_pca\n\t\treducer2=clusterer\n end\nend","category":"page"},{"location":"mlj_cheatsheet/","page":"MLJ Cheatsheet","title":"MLJ 
Cheatsheet","text":"UnivariateTimeTypeToContinuous","category":"page"},{"location":"models/ExtraTreesClassifier_MLJScikitLearnInterface/#ExtraTreesClassifier_MLJScikitLearnInterface","page":"ExtraTreesClassifier","title":"ExtraTreesClassifier","text":"","category":"section"},{"location":"models/ExtraTreesClassifier_MLJScikitLearnInterface/","page":"ExtraTreesClassifier","title":"ExtraTreesClassifier","text":"ExtraTreesClassifier","category":"page"},{"location":"models/ExtraTreesClassifier_MLJScikitLearnInterface/","page":"ExtraTreesClassifier","title":"ExtraTreesClassifier","text":"A model type for constructing a extra trees classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ExtraTreesClassifier_MLJScikitLearnInterface/","page":"ExtraTreesClassifier","title":"ExtraTreesClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ExtraTreesClassifier_MLJScikitLearnInterface/","page":"ExtraTreesClassifier","title":"ExtraTreesClassifier","text":"ExtraTreesClassifier = @load ExtraTreesClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/ExtraTreesClassifier_MLJScikitLearnInterface/","page":"ExtraTreesClassifier","title":"ExtraTreesClassifier","text":"Do model = ExtraTreesClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ExtraTreesClassifier(n_estimators=...).","category":"page"},{"location":"models/ExtraTreesClassifier_MLJScikitLearnInterface/","page":"ExtraTreesClassifier","title":"ExtraTreesClassifier","text":"Extra trees classifier, fits a number of randomized decision trees on various sub-samples of the dataset and uses averaging to improve the predictive accuracy and control over-fitting.","category":"page"},{"location":"models/SGDRegressor_MLJScikitLearnInterface/#SGDRegressor_MLJScikitLearnInterface","page":"SGDRegressor","title":"SGDRegressor","text":"","category":"section"},{"location":"models/SGDRegressor_MLJScikitLearnInterface/","page":"SGDRegressor","title":"SGDRegressor","text":"SGDRegressor","category":"page"},{"location":"models/SGDRegressor_MLJScikitLearnInterface/","page":"SGDRegressor","title":"SGDRegressor","text":"A model type for constructing a stochastic gradient descent-based regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SGDRegressor_MLJScikitLearnInterface/","page":"SGDRegressor","title":"SGDRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SGDRegressor_MLJScikitLearnInterface/","page":"SGDRegressor","title":"SGDRegressor","text":"SGDRegressor = @load SGDRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/SGDRegressor_MLJScikitLearnInterface/","page":"SGDRegressor","title":"SGDRegressor","text":"Do model = SGDRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in SGDRegressor(loss=...).","category":"page"},{"location":"models/SGDRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"SGDRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SGDRegressor_MLJScikitLearnInterface/","page":"SGDRegressor","title":"SGDRegressor","text":"loss = squared_error\npenalty = l2\nalpha = 0.0001\nl1_ratio = 0.15\nfit_intercept = true\nmax_iter = 1000\ntol = 0.001\nshuffle = true\nverbose = 0\nepsilon = 0.1\nrandom_state = nothing\nlearning_rate = invscaling\neta0 = 0.01\npower_t = 0.25\nearly_stopping = false\nvalidation_fraction = 0.1\nn_iter_no_change = 5\nwarm_start = false\naverage = false","category":"page"},{"location":"models/LassoCVRegressor_MLJScikitLearnInterface/#LassoCVRegressor_MLJScikitLearnInterface","page":"LassoCVRegressor","title":"LassoCVRegressor","text":"","category":"section"},{"location":"models/LassoCVRegressor_MLJScikitLearnInterface/","page":"LassoCVRegressor","title":"LassoCVRegressor","text":"LassoCVRegressor","category":"page"},{"location":"models/LassoCVRegressor_MLJScikitLearnInterface/","page":"LassoCVRegressor","title":"LassoCVRegressor","text":"A model type for constructing a lasso regressor with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LassoCVRegressor_MLJScikitLearnInterface/","page":"LassoCVRegressor","title":"LassoCVRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LassoCVRegressor_MLJScikitLearnInterface/","page":"LassoCVRegressor","title":"LassoCVRegressor","text":"LassoCVRegressor = @load LassoCVRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LassoCVRegressor_MLJScikitLearnInterface/","page":"LassoCVRegressor","title":"LassoCVRegressor","text":"Do model = LassoCVRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in LassoCVRegressor(eps=...).","category":"page"},{"location":"models/LassoCVRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"LassoCVRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LassoCVRegressor_MLJScikitLearnInterface/","page":"LassoCVRegressor","title":"LassoCVRegressor","text":"eps = 0.001\nn_alphas = 100\nalphas = nothing\nfit_intercept = true\nprecompute = auto\nmax_iter = 1000\ntol = 0.0001\ncopy_X = true\ncv = 5\nverbose = false\nn_jobs = nothing\npositive = false\nrandom_state = nothing\nselection = cyclic","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/#BorderlineSMOTE1_Imbalance","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"","category":"section"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"Initialize a BorderlineSMOTE1 model with the given hyper-parameters.","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"BorderlineSMOTE1","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"A model type for constructing a BorderlineSMOTE1 oversampler, based on Imbalance.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"BorderlineSMOTE1 = @load BorderlineSMOTE1 pkg=Imbalance","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"Do model = BorderlineSMOTE1() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in BorderlineSMOTE1(m=...).","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"BorderlineSMOTE1 implements the BorderlineSMOTE1 algorithm to correct for class imbalance as in Han, H., Wang, W.-Y., & Mao, B.-H. (2005). Borderline-SMOTE: A new over-sampling method in imbalanced data sets learning. In D.S. Huang, X.-P. Zhang, & G.-B. Huang (Eds.), Advances in Intelligent Computing (pp. 878-887). Springer. 
","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/#Training-data","page":"BorderlineSMOTE1","title":"Training data","text":"","category":"section"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"In MLJ or MLJBase, wrap the model in a machine by","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"mach = machine(model)","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"There is no need to provide any data here because the model is a static transformer.","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"Likewise, there is no need to fit!(mach).","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"For default values of the hyper-parameters, model can be constructed by","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"model = BorderlineSMOTE1()","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/#Hyperparameters","page":"BorderlineSMOTE1","title":"Hyperparameters","text":"","category":"section"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"m::Integer=5: The number of neighbors to consider while checking the BorderlineSMOTE1 condition. Should be within the range 0 < m < N where N is the number of observations in the data. It will be automatically set to N-1 if N ≤ m.\nk::Integer=5: Number of nearest neighbors to consider in the SMOTE part of the algorithm. Should be within the range 0 < k < n where n is the number of observations in the smallest class. It will be automatically set to l-1 for any class with l points where l ≤ k.\nratios=1.0: A parameter that controls the amount of oversampling to be done for each class\nCan be a float and in this case each class will be oversampled to the size of the majority class times the float. By default, all classes are oversampled to the size of the majority class\nCan be a dictionary mapping each class label to the float ratio for that class\nrng::Union{AbstractRNG, Integer}=default_rng(): Either an AbstractRNG object or an Integer seed to be used with Xoshiro if the Julia VERSION supports it. Otherwise, uses MersenneTwister`.\nverbosity::Integer=1: Whenever higher than 0 info regarding the points that will participate in oversampling is logged.","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/#Transform-Inputs","page":"BorderlineSMOTE1","title":"Transform Inputs","text":"","category":"section"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"X: A matrix or table of floats where each row is an observation from the dataset\ny: An abstract vector of labels (e.g., strings) that correspond to the observations in X","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/#Transform-Outputs","page":"BorderlineSMOTE1","title":"Transform Outputs","text":"","category":"section"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"Xover: A matrix or table that includes original data and the new observations due to oversampling. 
depending on whether the input X is a matrix or table respectively\nyover: An abstract vector of labels corresponding to Xover","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/#Operations","page":"BorderlineSMOTE1","title":"Operations","text":"","category":"section"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"transform(mach, X, y): resample the data X and y using BorderlineSMOTE1, returning both the new and original observations","category":"page"},{"location":"models/BorderlineSMOTE1_Imbalance/#Example","page":"BorderlineSMOTE1","title":"Example","text":"","category":"section"},{"location":"models/BorderlineSMOTE1_Imbalance/","page":"BorderlineSMOTE1","title":"BorderlineSMOTE1","text":"using MLJ\nimport Imbalance\n\n## set probability of each class\nclass_probs = [0.5, 0.2, 0.3] \nnum_rows, num_continuous_feats = 1000, 5\n## generate a table and categorical vector accordingly\nX, y = Imbalance.generate_imbalanced_data(num_rows, num_continuous_feats; \n stds=[0.1 0.1 0.1], min_sep=0.01, class_probs, rng=42) \n\njulia> Imbalance.checkbalance(y)\n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 200 (40.8%) \n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 310 (63.3%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 490 (100.0%) \n\n## load BorderlineSMOTE1\nBorderlineSMOTE1 = @load BorderlineSMOTE1 pkg=Imbalance\n\n## wrap the model in a machine\noversampler = BorderlineSMOTE1(m=3, k=5, ratios=Dict(0=>1.0, 1=> 0.9, 2=>0.8), rng=42)\nmach = machine(oversampler)\n\n## provide the data to transform (there is nothing to fit)\nXover, yover = transform(mach, X, y)\n\n\njulia> Imbalance.checkbalance(yover)\n2: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 392 (80.0%) \n1: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 441 (90.0%) \n0: ▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 490 (100.0%) ","category":"page"},{"location":"models/DecisionTreeClassifier_BetaML/#DecisionTreeClassifier_BetaML","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_BetaML/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"mutable struct DecisionTreeClassifier <: MLJModelInterface.Probabilistic","category":"page"},{"location":"models/DecisionTreeClassifier_BetaML/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"A simple Decision Tree model for classification with support for Missing data, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/DecisionTreeClassifier_BetaML/#Hyperparameters:","page":"DecisionTreeClassifier","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_BetaML/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"max_depth::Int64: The maximum depth the tree is allowed to reach. When this is reached the node is forced to become a leaf [def: 0, i.e. no limits]\nmin_gain::Float64: The minimum information gain to allow for a node's partition [def: 0]\nmin_records::Int64: The minimum number of records a node must holds to consider for a partition of it [def: 2]\nmax_features::Int64: The maximum number of (random) features to consider at each partitioning [def: 0, i.e. look at all features]\nsplitting_criterion::Function: This is the name of the function to be used to compute the information gain of a specific partition. 
This is done by measuring the difference between the \"impurity\" of the labels of the parent node and those of the two child nodes, weighted by the respective number of items. [def: gini]. Either gini, entropy or a custom function. It can also be an anonymous function.\nrng::Random.AbstractRNG: A Random Number Generator to be used in stochastic parts of the code [default: Random.GLOBAL_RNG]","category":"page"},{"location":"models/DecisionTreeClassifier_BetaML/#Example:","page":"DecisionTreeClassifier","title":"Example:","text":"","category":"section"},{"location":"models/DecisionTreeClassifier_BetaML/","page":"DecisionTreeClassifier","title":"DecisionTreeClassifier","text":"julia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load DecisionTreeClassifier pkg = \"BetaML\" verbosity=0\nBetaML.Trees.DecisionTreeClassifier\n\njulia> model = modelType()\nDecisionTreeClassifier(\n max_depth = 0, \n min_gain = 0.0, \n min_records = 2, \n max_features = 0, \n splitting_criterion = BetaML.Utils.gini, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, y);\n\njulia> fit!(mach);\n[ Info: Training machine(DecisionTreeClassifier(max_depth = 0, …), …).\n\njulia> cat_est = predict(mach, X)\n150-element CategoricalDistributions.UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:\n UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)\n UnivariateFinite{Multiclass{3}}(setosa=>1.0, versicolor=>0.0, virginica=>0.0)\n ⋮\n UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)\n UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)\n UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)","category":"page"},{"location":"loading_model_code/#Loading-Model-Code","page":"Loading Model Code","title":"Loading Model Code","text":"","category":"section"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"Once the name of a model, and the package providing that model, have been identified (see Model Search) one can either import the model type interactively with @iload, as shown under Installation, or use @load as shown below. The @load macro works from within a module, a package or a function, provided the relevant package providing the MLJ interface has been added to your package environment. It will attempt to load the model type into the global namespace of the module in which @load is invoked (Main if invoked at the REPL).","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"In general, the code providing core functionality for the model (living in a package you should consult for documentation) may be different from the package providing the MLJ interface. 
Since the core package is a dependency of the interface package, only the interface package needs to be added to your environment.","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"For instance, suppose you have activated a Julia package environment my_env that you wish to use for your MLJ project; for example, you have run:","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"using Pkg\nPkg.activate(\"my_env\", shared=true)","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"Furthermore, suppose you want to use DecisionTreeClassifier, provided by the DecisionTree.jl package. Then, to determine which package provides the MLJ interface you call load_path:","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"julia> load_path(\"DecisionTreeClassifier\", pkg=\"DecisionTree\")\n\"MLJDecisionTreeInterface.DecisionTreeClassifier\"","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"In this case, we see that the package required is MLJDecisionTreeInterface.jl. If this package is not in my_env (do Pkg.status() to check) you add it by running","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"julia> Pkg.add(\"MLJDecisionTreeInterface\");","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"So long as my_env is the active environment, this action need never be repeated (unless you run Pkg.rm(\"MLJDecisionTreeInterface\")). You are now ready to instantiate a decision tree classifier:","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"julia> Tree = @load DecisionTreeClassifier pkg=DecisionTree\njulia> tree = Tree()","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"which is equivalent to","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"julia> import MLJDecisionTreeInterface.DecisionTreeClassifier\njulia> Tree = MLJDecisionTreeInterface.DecisionTreeClassifier\njulia> tree = Tree()","category":"page"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"Tip. The specification pkg=... above can be dropped for the many models that are provided by only a single package.","category":"page"},{"location":"loading_model_code/#API","page":"Loading Model Code","title":"API","text":"","category":"section"},{"location":"loading_model_code/","page":"Loading Model Code","title":"Loading Model Code","text":"load_path\n@load\n@iload","category":"page"},{"location":"loading_model_code/#StatisticalTraits.load_path","page":"Loading Model Code","title":"StatisticalTraits.load_path","text":"load_path(model_name::String, pkg=nothing)\n\nReturn the load path for model type with name model_name, specifying the algorithm-providing package name pkg to resolve name conflicts, if necessary.\n\nload_path(proxy::NamedTuple)\n\nReturn the load path for the model whose name is proxy.name and whose algorithm-providing package has name proxy.package_name. 
For example, proxy could be any element of the vector returned by models().\n\nload_path(model)\n\nReturn the load path of a model instance or type. Usually requires necessary model code to have been separately loaded. Supply strings as above if code is not loaded.\n\n\n\n\n\n","category":"function"},{"location":"loading_model_code/#MLJModels.@load","page":"Loading Model Code","title":"MLJModels.@load","text":"@load ModelName pkg=nothing verbosity=0 add=false\n\nImport the model type the model named in the first argument into the calling module, specfying pkg in the case of an ambiguous name (to packages providing a model type with the same name). Returns the model type.\n\nWarning In older versions of MLJ/MLJModels, @load returned an instance instead.\n\nTo automatically add required interface packages to the current environment, specify add=true. For interactive loading, use @iload instead.\n\nExamples\n\nTree = @load DecisionTreeRegressor\ntree = Tree()\ntree2 = Tree(min_samples_split=6)\n\nSVM = @load SVC pkg=LIBSVM\nsvm = SVM()\n\nSee also @iload\n\n\n\n\n\n","category":"macro"},{"location":"loading_model_code/#MLJModels.@iload","page":"Loading Model Code","title":"MLJModels.@iload","text":"@iload ModelName\n\nInteractive alternative to @load. Provides user with an optioin to install (add) the required interface package to the current environment, and to choose the relevant model-providing package in ambiguous cases. See @load\n\n\n\n\n\n","category":"macro"},{"location":"models/MCDDetector_OutlierDetectionPython/#MCDDetector_OutlierDetectionPython","page":"MCDDetector","title":"MCDDetector","text":"","category":"section"},{"location":"models/MCDDetector_OutlierDetectionPython/","page":"MCDDetector","title":"MCDDetector","text":"MCDDetector(store_precision = true,\n assume_centered = false,\n support_fraction = nothing,\n random_state = nothing)","category":"page"},{"location":"models/MCDDetector_OutlierDetectionPython/","page":"MCDDetector","title":"MCDDetector","text":"https://pyod.readthedocs.io/en/latest/pyod.models.html#module-pyod.models.mcd","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#OneClassSVM_LIBSVM","page":"OneClassSVM","title":"OneClassSVM","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"OneClassSVM","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"A model type for constructing a one-class support vector machine, based on LIBSVM.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"OneClassSVM = @load OneClassSVM pkg=LIBSVM","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"Do model = OneClassSVM() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OneClassSVM(kernel=...).","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"Reference for algorithm and core C-library: C.-C. Chang and C.-J. Lin (2011): \"LIBSVM: a library for support vector machines.\" ACM Transactions on Intelligent Systems and Technology, 2(3):27:1–27:27. 
Updated at https://www.csie.ntu.edu.tw/~cjlin/papers/libsvm.pdf. ","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"This model is an outlier detection model delivering raw scores based on the decision function of a support vector machine. Like the NuSVC classifier, it uses the nu re-parameterization of the cost parameter appearing in standard support vector classification SVC.","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"To extract normalized scores (\"probabilities\") wrap the model using ProbabilisticDetector from OutlierDetection.jl. For threshold-based classification, wrap the probabilistic model using MLJ's BinaryThresholdPredictor. Examples of wrapping appear below.","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#Training-data","page":"OneClassSVM","title":"Training data","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"In MLJ or MLJBase, bind an instance model to data with:","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"mach = machine(model, X)","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"where","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"X: any table of input features (eg, a DataFrame) whose columns each have Continuous element scitype; check column scitypes with schema(X)","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#Hyper-parameters","page":"OneClassSVM","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"kernel=LIBSVM.Kernel.RadialBasis: either an object that can be called, as in kernel(x1, x2), or one of the built-in kernels from the LIBSVM.jl package listed below. Here x1 and x2 are vectors whose lengths match the number of columns of the training data X (see \"Examples\" below).\nLIBSVM.Kernel.Linear: (x1, x2) -> x1'*x2\nLIBSVM.Kernel.Polynomial: (x1, x2) -> (gamma*x1'*x2 + coef0)^degree\nLIBSVM.Kernel.RadialBasis: (x1, x2) -> (exp(-gamma*norm(x1 - x2)^2))\nLIBSVM.Kernel.Sigmoid: (x1, x2) -> tanh(gamma*x1'*x2 + coef0)\nHere gamma, coef0, degree are other hyper-parameters. Serialization of models with user-defined kernels comes with some restrictions. See LIBSVM.jl issue91\ngamma = 0.0: kernel parameter (see above); if gamma==-1.0 then gamma = 1/nfeatures is used in training, where nfeatures is the number of features (columns of X). If gamma==0.0 then gamma = 1/(var(Tables.matrix(X))*nfeatures) is used. Actual value used appears in the report (see below).\ncoef0 = 0.0: kernel parameter (see above)\ndegree::Int32 = Int32(3): degree in polynomial kernel (see above)\nnu=0.5 (range (0, 1]): An upper bound on the fraction of margin errors and a lower bound of the fraction of support vectors. Denoted ν in the cited paper. 
Changing nu changes the thickness of the margin (a neighborhood of the decision surface) and a margin error is said to have occurred if a training observation lies on the wrong side of the surface or within the margin.\ncachesize=200.0 cache memory size in MB\ntolerance=0.001: tolerance for the stopping criterion\nshrinking=true: whether to use shrinking heuristics","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#Operations","page":"OneClassSVM","title":"Operations","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"transform(mach, Xnew): return scores for outlierness, given features Xnew having the same scitype as X above. The greater the score, the more likely it is an outlier. This score is based on the SVM decision function. For normalized scores, wrap model using ProbabilisticDetector from OutlierDetection.jl and call predict instead, and for threshold-based classification, wrap again using BinaryThresholdPredictor. See the examples below.","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#Fitted-parameters","page":"OneClassSVM","title":"Fitted parameters","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"libsvm_model: the trained model object created by the LIBSVM.jl package\norientation: this equals 1 if the decision function for libsvm_model is increasing with increasing outlierness, and -1 if it is decreasing instead. Correspondingly, the libsvm_model attaches true to outliers in the first case, and false in the second. (The scores given in the MLJ report and generated by MLJ.transform already correct for this ambiguity, which is therefore only an issue for users directly accessing libsvm_model.)","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#Report","page":"OneClassSVM","title":"Report","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"The fields of report(mach) are:","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"gamma: actual value of the kernel parameter gamma used in training","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#Examples","page":"OneClassSVM","title":"Examples","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/#Generating-raw-scores-for-outlierness","page":"OneClassSVM","title":"Generating raw scores for outlierness","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"using MLJ\nimport LIBSVM\nimport StableRNGs.StableRNG\n\nOneClassSVM = @load OneClassSVM pkg=LIBSVM ## model type\nmodel = OneClassSVM(kernel=LIBSVM.Kernel.Polynomial) ## instance\n\nrng = StableRNG(123)\nXmatrix = randn(rng, 5, 3)\nXmatrix[1, 1] = 100.0\nX = MLJ.table(Xmatrix)\n\nmach = machine(model, X) |> fit!\n\n## training scores (outliers have larger scores):\njulia> report(mach).scores\n5-element Vector{Float64}:\n 6.711689156091755e-7\n -6.740101976655081e-7\n -6.711632439648446e-7\n -6.743015858874887e-7\n -6.745393717880104e-7\n\n## scores for new data:\nXnew = MLJ.table(rand(rng, 2, 3))\n\njulia> transform(mach, rand(rng, 2, 3))\n2-element Vector{Float64}:\n -6.746293022511047e-7\n 
-6.744289265348623e-7","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#Generating-probabilistic-predictions-of-outlierness","page":"OneClassSVM","title":"Generating probabilistic predictions of outlierness","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"Continuing the previous example:","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"using OutlierDetection\npmodel = ProbabilisticDetector(model)\npmach = machine(pmodel, X) |> fit!\n\n## probabilistic predictions on new data:\n\njulia> y_prob = predict(pmach, Xnew)\n2-element UnivariateFiniteVector{OrderedFactor{2}, String, UInt8, Float64}:\n UnivariateFinite{OrderedFactor{2}}(normal=>1.0, outlier=>9.57e-5)\n UnivariateFinite{OrderedFactor{2}}(normal=>1.0, outlier=>0.0)\n\n## probabilities for outlierness:\n\njulia> pdf.(y_prob, \"outlier\")\n2-element Vector{Float64}:\n 9.572583265925801e-5\n 0.0\n\n## raw scores are still available using `transform`:\n\njulia> transform(pmach, Xnew)\n2-element Vector{Float64}:\n 9.572583265925801e-5\n 0.0","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#Outlier-classification-using-a-probability-threshold:","page":"OneClassSVM","title":"Outlier classification using a probability threshold:","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"Continuing the previous example:","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"dmodel = BinaryThresholdPredictor(pmodel, threshold=0.9)\ndmach = machine(dmodel, X) |> fit!\n\njulia> yhat = predict(dmach, Xnew)\n2-element CategoricalArrays.CategoricalArray{String,1,UInt8}:\n \"normal\"\n \"normal\"","category":"page"},{"location":"models/OneClassSVM_LIBSVM/#User-defined-kernels","page":"OneClassSVM","title":"User-defined kernels","text":"","category":"section"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"Continuing the first example:","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"k(x1, x2) = x1'*x2 ## equivalent to `LIBSVM.Kernel.Linear`\nmodel = OneClassSVM(kernel=k)\nmach = machine(model, X) |> fit!\n\njulia> yhat = transform(mach, Xnew)\n2-element Vector{Float64}:\n -0.4825363352732942\n -0.4848772169720227","category":"page"},{"location":"models/OneClassSVM_LIBSVM/","page":"OneClassSVM","title":"OneClassSVM","text":"See also LIVSVM.jl and the original C implementation documentation. 
For an alternative source of outlier detection models with an MLJ interface, see OutlierDetection.jl.","category":"page"},{"location":"models/KNeighborsClassifier_MLJScikitLearnInterface/#KNeighborsClassifier_MLJScikitLearnInterface","page":"KNeighborsClassifier","title":"KNeighborsClassifier","text":"","category":"section"},{"location":"models/KNeighborsClassifier_MLJScikitLearnInterface/","page":"KNeighborsClassifier","title":"KNeighborsClassifier","text":"KNeighborsClassifier","category":"page"},{"location":"models/KNeighborsClassifier_MLJScikitLearnInterface/","page":"KNeighborsClassifier","title":"KNeighborsClassifier","text":"A model type for constructing a K-nearest neighbors classifier, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/KNeighborsClassifier_MLJScikitLearnInterface/","page":"KNeighborsClassifier","title":"KNeighborsClassifier","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/KNeighborsClassifier_MLJScikitLearnInterface/","page":"KNeighborsClassifier","title":"KNeighborsClassifier","text":"KNeighborsClassifier = @load KNeighborsClassifier pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/KNeighborsClassifier_MLJScikitLearnInterface/","page":"KNeighborsClassifier","title":"KNeighborsClassifier","text":"Do model = KNeighborsClassifier() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KNeighborsClassifier(n_neighbors=...).","category":"page"},{"location":"models/KNeighborsClassifier_MLJScikitLearnInterface/#Hyper-parameters","page":"KNeighborsClassifier","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/KNeighborsClassifier_MLJScikitLearnInterface/","page":"KNeighborsClassifier","title":"KNeighborsClassifier","text":"n_neighbors = 5\nweights = uniform\nalgorithm = auto\nleaf_size = 30\np = 2\nmetric = minkowski\nmetric_params = nothing\nn_jobs = nothing","category":"page"},{"location":"models/BayesianLDA_MLJScikitLearnInterface/#BayesianLDA_MLJScikitLearnInterface","page":"BayesianLDA","title":"BayesianLDA","text":"","category":"section"},{"location":"models/BayesianLDA_MLJScikitLearnInterface/","page":"BayesianLDA","title":"BayesianLDA","text":"BayesianLDA","category":"page"},{"location":"models/BayesianLDA_MLJScikitLearnInterface/","page":"BayesianLDA","title":"BayesianLDA","text":"A model type for constructing a Bayesian linear discriminant analysis, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/BayesianLDA_MLJScikitLearnInterface/","page":"BayesianLDA","title":"BayesianLDA","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/BayesianLDA_MLJScikitLearnInterface/","page":"BayesianLDA","title":"BayesianLDA","text":"BayesianLDA = @load BayesianLDA pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/BayesianLDA_MLJScikitLearnInterface/","page":"BayesianLDA","title":"BayesianLDA","text":"Do model = BayesianLDA() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in BayesianLDA(solver=...).","category":"page"},{"location":"models/BayesianLDA_MLJScikitLearnInterface/#Hyper-parameters","page":"BayesianLDA","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/BayesianLDA_MLJScikitLearnInterface/","page":"BayesianLDA","title":"BayesianLDA","text":"solver = svd\nshrinkage = nothing\npriors = nothing\nn_components = nothing\nstore_covariance = false\ntol = 0.0001\ncovariance_estimator = nothing","category":"page"},{"location":"models/LassoLarsCVRegressor_MLJScikitLearnInterface/#LassoLarsCVRegressor_MLJScikitLearnInterface","page":"LassoLarsCVRegressor","title":"LassoLarsCVRegressor","text":"","category":"section"},{"location":"models/LassoLarsCVRegressor_MLJScikitLearnInterface/","page":"LassoLarsCVRegressor","title":"LassoLarsCVRegressor","text":"LassoLarsCVRegressor","category":"page"},{"location":"models/LassoLarsCVRegressor_MLJScikitLearnInterface/","page":"LassoLarsCVRegressor","title":"LassoLarsCVRegressor","text":"A model type for constructing a Lasso model fit with least angle regression (LARS) with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LassoLarsCVRegressor_MLJScikitLearnInterface/","page":"LassoLarsCVRegressor","title":"LassoLarsCVRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LassoLarsCVRegressor_MLJScikitLearnInterface/","page":"LassoLarsCVRegressor","title":"LassoLarsCVRegressor","text":"LassoLarsCVRegressor = @load LassoLarsCVRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LassoLarsCVRegressor_MLJScikitLearnInterface/","page":"LassoLarsCVRegressor","title":"LassoLarsCVRegressor","text":"Do model = LassoLarsCVRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in LassoLarsCVRegressor(fit_intercept=...).","category":"page"},{"location":"models/LassoLarsCVRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"LassoLarsCVRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LassoLarsCVRegressor_MLJScikitLearnInterface/","page":"LassoLarsCVRegressor","title":"LassoLarsCVRegressor","text":"fit_intercept = true\nverbose = false\nmax_iter = 500\nnormalize = false\nprecompute = auto\ncv = 5\nmax_n_alphas = 1000\nn_jobs = nothing\neps = 2.220446049250313e-16\ncopy_X = true\npositive = false","category":"page"},{"location":"models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/#OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface","page":"OrthogonalMatchingPursuitCVRegressor","title":"OrthogonalMatchingPursuitCVRegressor","text":"","category":"section"},{"location":"models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitCVRegressor","title":"OrthogonalMatchingPursuitCVRegressor","text":"OrthogonalMatchingPursuitCVRegressor","category":"page"},{"location":"models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitCVRegressor","title":"OrthogonalMatchingPursuitCVRegressor","text":"A model type for constructing an orthogonal matching pursuit (OMP) model with built-in cross-validation, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitCVRegressor","title":"OrthogonalMatchingPursuitCVRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitCVRegressor","title":"OrthogonalMatchingPursuitCVRegressor","text":"OrthogonalMatchingPursuitCVRegressor = @load OrthogonalMatchingPursuitCVRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitCVRegressor","title":"OrthogonalMatchingPursuitCVRegressor","text":"Do model = OrthogonalMatchingPursuitCVRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in OrthogonalMatchingPursuitCVRegressor(copy=...).","category":"page"},{"location":"models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"OrthogonalMatchingPursuitCVRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/OrthogonalMatchingPursuitCVRegressor_MLJScikitLearnInterface/","page":"OrthogonalMatchingPursuitCVRegressor","title":"OrthogonalMatchingPursuitCVRegressor","text":"copy = true\nfit_intercept = true\nnormalize = false\nmax_iter = nothing\ncv = 5\nn_jobs = 1\nverbose = false","category":"page"},{"location":"models/KMeansClusterer_BetaML/#KMeansClusterer_BetaML","page":"KMeansClusterer","title":"KMeansClusterer","text":"","category":"section"},{"location":"models/KMeansClusterer_BetaML/","page":"KMeansClusterer","title":"KMeansClusterer","text":"mutable struct KMeansClusterer <: MLJModelInterface.Unsupervised","category":"page"},{"location":"models/KMeansClusterer_BetaML/","page":"KMeansClusterer","title":"KMeansClusterer","text":"The classical KMeansClusterer clustering algorithm, from the Beta Machine Learning Toolkit (BetaML).","category":"page"},{"location":"models/KMeansClusterer_BetaML/#Parameters:","page":"KMeansClusterer","title":"Parameters:","text":"","category":"section"},{"location":"models/KMeansClusterer_BetaML/","page":"KMeansClusterer","title":"KMeansClusterer","text":"n_classes::Int64: Number of classes to discriminate the data [def: 3]\ndist::Function: Function to employ as distance. Default to the Euclidean distance. Can be one of the predefined distances (l1_distance, l2_distance, l2squared_distance), cosine_distance), any user defined function accepting two vectors and returning a scalar or an anonymous function with the same characteristics. Attention that, contrary to KMedoidsClusterer, the KMeansClusterer algorithm is not guaranteed to converge with other distances than the Euclidean one.\ninitialisation_strategy::String: The computation method of the vector of the initial representatives. 
One of the following:\n\"random\": randomly in the X space\n\"grid\": using a grid approach\n\"shuffle\": selecting randomly within the available points [default]\n\"given\": using a provided set of initial representatives provided in the initial_representatives parameter\ninitial_representatives::Union{Nothing, Matrix{Float64}}: Provided (K x D) matrix of initial representatives (useful only with initialisation_strategy=\"given\") [default: nothing]\nrng::Random.AbstractRNG: Random Number Generator [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/KMeansClusterer_BetaML/#Notes:","page":"KMeansClusterer","title":"Notes:","text":"","category":"section"},{"location":"models/KMeansClusterer_BetaML/","page":"KMeansClusterer","title":"KMeansClusterer","text":"data must be numerical\nonline fitting (re-fitting with new data) is supported","category":"page"},{"location":"models/KMeansClusterer_BetaML/#Example:","page":"KMeansClusterer","title":"Example:","text":"","category":"section"},{"location":"models/KMeansClusterer_BetaML/","page":"KMeansClusterer","title":"KMeansClusterer","text":"julia> using MLJ\n\njulia> X, y = @load_iris;\n\njulia> modelType = @load KMeansClusterer pkg = \"BetaML\" verbosity=0\nBetaML.Clustering.KMeansClusterer\n\njulia> model = modelType()\nKMeansClusterer(\n n_classes = 3, \n dist = BetaML.Clustering.var\"#34#36\"(), \n initialisation_strategy = \"shuffle\", \n initial_representatives = nothing, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X);\n\njulia> fit!(mach);\n[ Info: Training machine(KMeansClusterer(n_classes = 3, …), …).\n\njulia> classes_est = predict(mach, X);\n\njulia> hcat(y,classes_est)\n150×2 CategoricalArrays.CategoricalArray{Union{Int64, String},2,UInt32}:\n \"setosa\" 2\n \"setosa\" 2\n \"setosa\" 2\n ⋮ \n \"virginica\" 3\n \"virginica\" 3\n \"virginica\" 1","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/#LassoRegressor_MLJLinearModels","page":"LassoRegressor","title":"LassoRegressor","text":"","category":"section"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"LassoRegressor","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"A model type for constructing a lasso regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"LassoRegressor = @load LassoRegressor pkg=MLJLinearModels","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"Do model = LassoRegressor() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"Lasso regression is a linear model with objective function","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"$","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"|Xθ - y|₂²/2 + n⋅λ|θ|₁ 
$","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"where n is the number of observations.","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"If scale_penalty_with_samples = false the objective function is","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"$","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"|Xθ - y|₂²/2 + λ|θ|₁ $","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":".","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"Different solver options exist, as indicated under \"Hyperparameters\" below. ","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/#Training-data","page":"LassoRegressor","title":"Training data","text":"","category":"section"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"where:","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/#Hyperparameters","page":"LassoRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"lambda::Real: strength of the L1 regularization. Default: 1.0\nfit_intercept::Bool: whether to fit the intercept or not. Default: true\npenalize_intercept::Bool: whether to penalize the intercept. Default: false\nscale_penalty_with_samples::Bool: whether to scale the penalty with the number of observations. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: any instance of MLJLinearModels.ProxGrad. If solver=nothing (default) then ProxGrad(accel=true) (FISTA) is used. Solver aliases: FISTA(; kwargs...) = ProxGrad(accel=true, kwargs...), ISTA(; kwargs...) = ProxGrad(accel=false, kwargs...). 
Default: nothing","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/#Example","page":"LassoRegressor","title":"Example","text":"","category":"section"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"using MLJ\nX, y = make_regression()\nmach = fit!(machine(LassoRegressor(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"},{"location":"models/LassoRegressor_MLJLinearModels/","page":"LassoRegressor","title":"LassoRegressor","text":"See also ElasticNetRegressor.","category":"page"},{"location":"model_stacking/#Model-Stacking","page":"Model Stacking","title":"Model Stacking","text":"","category":"section"},{"location":"model_stacking/","page":"Model Stacking","title":"Model Stacking","text":"In a model stack, as introduced by Wolpert (1992), an adjudicating model learns the best way to combine the predictions of multiple base models. In MLJ, such models are constructed using the Stack constructor. To learn more about stacking and to see how to construct a stack \"by hand\" using Learning Networks, see this Data Science in Julia tutorial","category":"page"},{"location":"model_stacking/","page":"Model Stacking","title":"Model Stacking","text":"MLJBase.Stack","category":"page"},{"location":"model_stacking/#MLJBase.Stack","page":"Model Stacking","title":"MLJBase.Stack","text":"Stack(; metalearner=nothing, name1=model1, name2=model2, ..., keyword_options...)\n\nImplements the two-layer generalized stack algorithm introduced by Wolpert (1992) and generalized by Van der Laan et al (2007). Returns an instance of type ProbabilisticStack or DeterministicStack, depending on the prediction type of metalearner.\n\nWhen training a machine bound to such an instance:\n\nThe data is split into training/validation sets according to the specified resampling strategy.\nEach base model model1, model2, ... is trained on each training subset and outputs predictions on the corresponding validation sets. The multi-fold predictions are spliced together into a so-called out-of-sample prediction for each model.\nThe adjudicating model, metalearner, is subsequently trained on the out-of-sample predictions to learn the best combination of base model predictions.\nEach base model is retrained on all supplied data for purposes of passing on new production data onto the adjudicator for making new predictions\n\nArguments\n\nmetalearner::Supervised: The model that will optimize the desired criterion based on its internals. For instance, a LinearRegression model will optimize the squared error.\nresampling: The resampling strategy used to prepare out-of-sample predictions of the base learners.\nmeasures: A measure or iterable over measures, to perform an internal evaluation of the learners in the Stack while training. This is not for the evaluation of the Stack itself.\ncache: Whether machines created in the learning network will cache data or not.\nacceleration: A supported AbstractResource to define the training parallelization mode of the stack.\nname1=model1, name2=model2, ...: the Supervised model instances to be used as base learners. 
The provided names become properties of the instance created to allow hyper-parameter access\n\nExample\n\nThe following code defines a DeterministicStack instance for learning a Continuous target, and demonstrates that:\n\nBase models can be Probabilistic models even if the stack itself is Deterministic (predict_mean is applied in such cases).\nAs an alternative to hyperparameter optimization, one can stack multiple copies of given model, mutating the hyper-parameter used in each copy.\n\nusing MLJ\n\nDecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree\nEvoTreeRegressor = @load EvoTreeRegressor\nXGBoostRegressor = @load XGBoostRegressor\nKNNRegressor = @load KNNRegressor pkg=NearestNeighborModels\nLinearRegressor = @load LinearRegressor pkg=MLJLinearModels\n\nX, y = make_regression(500, 5)\n\nstack = Stack(;metalearner=LinearRegressor(),\n resampling=CV(),\n measures=rmse,\n constant=ConstantRegressor(),\n tree_2=DecisionTreeRegressor(max_depth=2),\n tree_3=DecisionTreeRegressor(max_depth=3),\n evo=EvoTreeRegressor(),\n knn=KNNRegressor(),\n xgb=XGBoostRegressor())\n\nmach = machine(stack, X, y)\nevaluate!(mach; resampling=Holdout(), measure=rmse)\n\n\nThe internal evaluation report can be accessed like this and provides a PerformanceEvaluation object for each model:\n\nreport(mach).cv_report\n\n\n\n\n\n","category":"type"},{"location":"models/LGBMClassifier_LightGBM/#LGBMClassifier_LightGBM","page":"LGBMClassifier","title":"LGBMClassifier","text":"","category":"section"},{"location":"models/LGBMClassifier_LightGBM/","page":"LGBMClassifier","title":"LGBMClassifier","text":"Microsoft LightGBM FFI wrapper: Classifier","category":"page"},{"location":"models/Birch_MLJScikitLearnInterface/#Birch_MLJScikitLearnInterface","page":"Birch","title":"Birch","text":"","category":"section"},{"location":"models/Birch_MLJScikitLearnInterface/","page":"Birch","title":"Birch","text":"Birch","category":"page"},{"location":"models/Birch_MLJScikitLearnInterface/","page":"Birch","title":"Birch","text":"A model type for constructing a birch, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/Birch_MLJScikitLearnInterface/","page":"Birch","title":"Birch","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/Birch_MLJScikitLearnInterface/","page":"Birch","title":"Birch","text":"Birch = @load Birch pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/Birch_MLJScikitLearnInterface/","page":"Birch","title":"Birch","text":"Do model = Birch() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in Birch(threshold=...).","category":"page"},{"location":"models/Birch_MLJScikitLearnInterface/","page":"Birch","title":"Birch","text":"Memory-efficient, online-learning algorithm provided as an alternative to MiniBatchKMeans. 
Note: noisy samples are given the label -1.","category":"page"},{"location":"models/MultitargetGaussianMixtureRegressor_BetaML/#MultitargetGaussianMixtureRegressor_BetaML","page":"MultitargetGaussianMixtureRegressor","title":"MultitargetGaussianMixtureRegressor","text":"","category":"section"},{"location":"models/MultitargetGaussianMixtureRegressor_BetaML/","page":"MultitargetGaussianMixtureRegressor","title":"MultitargetGaussianMixtureRegressor","text":"mutable struct MultitargetGaussianMixtureRegressor <: MLJModelInterface.Deterministic","category":"page"},{"location":"models/MultitargetGaussianMixtureRegressor_BetaML/","page":"MultitargetGaussianMixtureRegressor","title":"MultitargetGaussianMixtureRegressor","text":"A non-linear regressor derived from fitting the data on a probabilistic model (Gaussian Mixture Model). Relatively fast but generally not very precise, except for data with a structure matching the chosen underlying mixture.","category":"page"},{"location":"models/MultitargetGaussianMixtureRegressor_BetaML/","page":"MultitargetGaussianMixtureRegressor","title":"MultitargetGaussianMixtureRegressor","text":"This is the multi-target version of the model. If you want to predict a single label (y), use the MLJ model GaussianMixtureRegressor.","category":"page"},{"location":"models/MultitargetGaussianMixtureRegressor_BetaML/#Hyperparameters:","page":"MultitargetGaussianMixtureRegressor","title":"Hyperparameters:","text":"","category":"section"},{"location":"models/MultitargetGaussianMixtureRegressor_BetaML/","page":"MultitargetGaussianMixtureRegressor","title":"MultitargetGaussianMixtureRegressor","text":"n_classes::Int64: Number of mixtures (latent classes) to consider [def: 3]\ninitial_probmixtures::Vector{Float64}: Initial probabilities of the categorical distribution (n_classes x 1) [default: []]\nmixtures::Union{Type, Vector{<:BetaML.GMM.AbstractMixture}}: An array (of length n_classes) of the mixtures to employ (see the [?GMM](@ref GMM) module). Each mixture object can be provided with or without its parameters (e.g. mean and variance for the gaussian ones). Fully qualified mixtures are useful only if the initialisation_strategy parameter is set to \"given\". This parameter can also be given simply in terms of a type; in this case it is automatically extended to a vector of n_classes mixtures of the specified type. Note that mixing of different mixture types is not currently supported. [def: [DiagonalGaussian() for i in 1:n_classes]]\ntol::Float64: Tolerance to stop the algorithm [default: 10^(-6)]\nminimum_variance::Float64: Minimum variance for the mixtures [default: 0.05]\nminimum_covariance::Float64: Minimum covariance for the mixtures with full covariance matrix [default: 0]. This should be set to a value different from minimum_variance (see notes).\ninitialisation_strategy::String: The computation method of the vector of the initial mixtures. One of the following:\n\"grid\": using a grid approach\n\"given\": using the mixture provided in the fully qualified mixtures parameter\n\"kmeans\": use first kmeans (itself initialised with a \"grid\" strategy) to set the initial mixture centers [default]\nNote that currently \"random\" and \"shuffle\" initialisations are not supported in gmm-based algorithms.\nmaximum_iterations::Int64: Maximum number of iterations [def: typemax(Int64), i.e. 
∞]\nrng::Random.AbstractRNG: Random Number Generator [deafult: Random.GLOBAL_RNG]","category":"page"},{"location":"models/MultitargetGaussianMixtureRegressor_BetaML/#Example:","page":"MultitargetGaussianMixtureRegressor","title":"Example:","text":"","category":"section"},{"location":"models/MultitargetGaussianMixtureRegressor_BetaML/","page":"MultitargetGaussianMixtureRegressor","title":"MultitargetGaussianMixtureRegressor","text":"julia> using MLJ\n\njulia> X, y = @load_boston;\n\njulia> ydouble = hcat(y, y .*2 .+5);\n\njulia> modelType = @load MultitargetGaussianMixtureRegressor pkg = \"BetaML\" verbosity=0\nBetaML.GMM.MultitargetGaussianMixtureRegressor\n\njulia> model = modelType()\nMultitargetGaussianMixtureRegressor(\n n_classes = 3, \n initial_probmixtures = Float64[], \n mixtures = BetaML.GMM.DiagonalGaussian{Float64}[BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing), BetaML.GMM.DiagonalGaussian{Float64}(nothing, nothing)], \n tol = 1.0e-6, \n minimum_variance = 0.05, \n minimum_covariance = 0.0, \n initialisation_strategy = \"kmeans\", \n maximum_iterations = 9223372036854775807, \n rng = Random._GLOBAL_RNG())\n\njulia> mach = machine(model, X, ydouble);\n\njulia> fit!(mach);\n[ Info: Training machine(MultitargetGaussianMixtureRegressor(n_classes = 3, …), …).\nIter. 1: Var. of the post 20.46947926187522 Log-likelihood -23662.72770575145\n\njulia> ŷdouble = predict(mach, X)\n506×2 Matrix{Float64}:\n 23.3358 51.6717\n 23.3358 51.6717\n ⋮ \n 16.6843 38.3686\n 16.6843 38.3686","category":"page"},{"location":"models/DBSCAN_MLJScikitLearnInterface/#DBSCAN_MLJScikitLearnInterface","page":"DBSCAN","title":"DBSCAN","text":"","category":"section"},{"location":"models/DBSCAN_MLJScikitLearnInterface/","page":"DBSCAN","title":"DBSCAN","text":"DBSCAN","category":"page"},{"location":"models/DBSCAN_MLJScikitLearnInterface/","page":"DBSCAN","title":"DBSCAN","text":"A model type for constructing a dbscan, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/DBSCAN_MLJScikitLearnInterface/","page":"DBSCAN","title":"DBSCAN","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/DBSCAN_MLJScikitLearnInterface/","page":"DBSCAN","title":"DBSCAN","text":"DBSCAN = @load DBSCAN pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/DBSCAN_MLJScikitLearnInterface/","page":"DBSCAN","title":"DBSCAN","text":"Do model = DBSCAN() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in DBSCAN(eps=...).","category":"page"},{"location":"models/DBSCAN_MLJScikitLearnInterface/","page":"DBSCAN","title":"DBSCAN","text":"Density-Based Spatial Clustering of Applications with Noise. Finds core samples of high density and expands clusters from them. 
Good for data which contains clusters of similar density.","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/#KNNRegressor_NearestNeighborModels","page":"KNNRegressor","title":"KNNRegressor","text":"","category":"section"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"KNNRegressor","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"A model type for constructing a K-nearest neighbor regressor, based on NearestNeighborModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"KNNRegressor = @load KNNRegressor pkg=NearestNeighborModels","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"Do model = KNNRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in KNNRegressor(K=...).","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"KNNRegressor implements K-Nearest Neighbors regressor which is non-parametric algorithm that predicts the response associated with a new point by taking an weighted average of the response of the K-nearest points.","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/#Training-data","page":"KNNRegressor","title":"Training data","text":"","category":"section"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"OR","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"mach = machine(model, X, y, w)","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"Here:","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns are of scitype Continuous; check column scitypes with schema(X).\ny is the target, which can be any table of responses whose element scitype is Continuous; check the scitype with scitype(y).\nw is the observation weights which can either be nothing(default) or an AbstractVector whoose element scitype is Count or Continuous. 
This is different from weights kernel which is an hyperparameter to the model, see below.","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/#Hyper-parameters","page":"KNNRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"K::Int=5 : number of neighbors\nalgorithm::Symbol = :kdtree : one of (:kdtree, :brutetree, :balltree)\nmetric::Metric = Euclidean() : any Metric from Distances.jl for the distance between points. For algorithm = :kdtree only metrics which are instances of Union{Distances.Chebyshev, Distances.Cityblock, Distances.Euclidean, Distances.Minkowski, Distances.WeightedCityblock, Distances.WeightedEuclidean, Distances.WeightedMinkowski} are supported.\nleafsize::Int = algorithm == 10 : determines the number of points at which to stop splitting the tree. This option is ignored and always taken as 0 for algorithm = :brutetree, since brutetree isn't actually a tree.\nreorder::Bool = true : if true then points which are close in distance are placed close in memory. In this case, a copy of the original data will be made so that the original data is left unmodified. Setting this to true can significantly improve performance of the specified algorithm (except :brutetree). This option is ignored and always taken as false for algorithm = :brutetree.\nweights::KNNKernel=Uniform() : kernel used in assigning weights to the k-nearest neighbors for each observation. An instance of one of the types in list_kernels(). User-defined weighting functions can be passed by wrapping the function in a UserDefinedKernel kernel (do ?NearestNeighborModels.UserDefinedKernel for more info). If observation weights w are passed during machine construction then the weight assigned to each neighbor vote is the product of the kernel generated weight for that neighbor and the corresponding observation weight.","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/#Operations","page":"KNNRegressor","title":"Operations","text":"","category":"section"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"predict(mach, Xnew): Return predictions of the target given features Xnew, which should have same scitype as X above.","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/#Fitted-parameters","page":"KNNRegressor","title":"Fitted parameters","text":"","category":"section"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"The fields of fitted_params(mach) are:","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"tree: An instance of either KDTree, BruteTree or BallTree depending on the value of the algorithm hyperparameter (See hyper-parameters section above). 
These are data structures that store the training data with the view of making quicker nearest neighbor searches on test data points.","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/#Examples","page":"KNNRegressor","title":"Examples","text":"","category":"section"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"using MLJ\nKNNRegressor = @load KNNRegressor pkg=NearestNeighborModels\nX, y = @load_boston; ## loads the Boston housing dataset from MLJBase\n## view possible kernels\nNearestNeighborModels.list_kernels()\nmodel = KNNRegressor(weights = NearestNeighborModels.Inverse()) ## KNNRegressor instantiation\nmach = machine(model, X, y) |> fit! ## wrap model and required data in an MLJ machine and fit\ny_hat = predict(mach, X)\n","category":"page"},{"location":"models/KNNRegressor_NearestNeighborModels/","page":"KNNRegressor","title":"KNNRegressor","text":"See also MultitargetKNNRegressor","category":"page"},{"location":"models/ARDRegressor_MLJScikitLearnInterface/#ARDRegressor_MLJScikitLearnInterface","page":"ARDRegressor","title":"ARDRegressor","text":"","category":"section"},{"location":"models/ARDRegressor_MLJScikitLearnInterface/","page":"ARDRegressor","title":"ARDRegressor","text":"ARDRegressor","category":"page"},{"location":"models/ARDRegressor_MLJScikitLearnInterface/","page":"ARDRegressor","title":"ARDRegressor","text":"A model type for constructing a Bayesian ARD regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/ARDRegressor_MLJScikitLearnInterface/","page":"ARDRegressor","title":"ARDRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/ARDRegressor_MLJScikitLearnInterface/","page":"ARDRegressor","title":"ARDRegressor","text":"ARDRegressor = @load ARDRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/ARDRegressor_MLJScikitLearnInterface/","page":"ARDRegressor","title":"ARDRegressor","text":"Do model = ARDRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in ARDRegressor(n_iter=...).","category":"page"},{"location":"models/ARDRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"ARDRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/ARDRegressor_MLJScikitLearnInterface/","page":"ARDRegressor","title":"ARDRegressor","text":"n_iter = 300\ntol = 0.001\nalpha_1 = 1.0e-6\nalpha_2 = 1.0e-6\nlambda_1 = 1.0e-6\nlambda_2 = 1.0e-6\ncompute_score = false\nthreshold_lambda = 10000.0\nfit_intercept = true\ncopy_X = true\nverbose = false","category":"page"},{"location":"models/LinearRegressor_MLJScikitLearnInterface/#LinearRegressor_MLJScikitLearnInterface","page":"LinearRegressor","title":"LinearRegressor","text":"","category":"section"},{"location":"models/LinearRegressor_MLJScikitLearnInterface/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor","category":"page"},{"location":"models/LinearRegressor_MLJScikitLearnInterface/","page":"LinearRegressor","title":"LinearRegressor","text":"A model type for constructing a ordinary least-squares regressor (OLS), based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LinearRegressor_MLJScikitLearnInterface/","page":"LinearRegressor","title":"LinearRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LinearRegressor_MLJScikitLearnInterface/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor = @load LinearRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/LinearRegressor_MLJScikitLearnInterface/","page":"LinearRegressor","title":"LinearRegressor","text":"Do model = LinearRegressor() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in LinearRegressor(fit_intercept=...).","category":"page"},{"location":"models/LinearRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"LinearRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/LinearRegressor_MLJScikitLearnInterface/","page":"LinearRegressor","title":"LinearRegressor","text":"fit_intercept = true\ncopy_X = true\nn_jobs = nothing","category":"page"},{"location":"models/SVMNuRegressor_MLJScikitLearnInterface/#SVMNuRegressor_MLJScikitLearnInterface","page":"SVMNuRegressor","title":"SVMNuRegressor","text":"","category":"section"},{"location":"models/SVMNuRegressor_MLJScikitLearnInterface/","page":"SVMNuRegressor","title":"SVMNuRegressor","text":"SVMNuRegressor","category":"page"},{"location":"models/SVMNuRegressor_MLJScikitLearnInterface/","page":"SVMNuRegressor","title":"SVMNuRegressor","text":"A model type for constructing a nu-support vector regressor, based on MLJScikitLearnInterface.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/SVMNuRegressor_MLJScikitLearnInterface/","page":"SVMNuRegressor","title":"SVMNuRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/SVMNuRegressor_MLJScikitLearnInterface/","page":"SVMNuRegressor","title":"SVMNuRegressor","text":"SVMNuRegressor = @load SVMNuRegressor pkg=MLJScikitLearnInterface","category":"page"},{"location":"models/SVMNuRegressor_MLJScikitLearnInterface/","page":"SVMNuRegressor","title":"SVMNuRegressor","text":"Do model = SVMNuRegressor() to construct an instance with default hyper-parameters. 
Provide keyword arguments to override hyper-parameter defaults, as in SVMNuRegressor(nu=...).","category":"page"},{"location":"models/SVMNuRegressor_MLJScikitLearnInterface/#Hyper-parameters","page":"SVMNuRegressor","title":"Hyper-parameters","text":"","category":"section"},{"location":"models/SVMNuRegressor_MLJScikitLearnInterface/","page":"SVMNuRegressor","title":"SVMNuRegressor","text":"nu = 0.5\nC = 1.0\nkernel = rbf\ndegree = 3\ngamma = scale\ncoef0 = 0.0\nshrinking = true\ntol = 0.001\ncache_size = 200\nmax_iter = -1","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/#LinearRegressor_MLJLinearModels","page":"LinearRegressor","title":"LinearRegressor","text":"","category":"section"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"A model type for constructing a linear regressor, based on MLJLinearModels.jl, and implementing the MLJ model interface.","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"From MLJ, the type can be imported using","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"LinearRegressor = @load LinearRegressor pkg=MLJLinearModels","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"Do model = LinearRegressor() to construct an instance with default hyper-parameters.","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"This model provides standard linear regression with objective function","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"$","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"|Xθ - y|₂²/2 $","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"Different solver options exist, as indicated under \"Hyperparameters\" below. 
","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/#Training-data","page":"LinearRegressor","title":"Training data","text":"","category":"section"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"In MLJ or MLJBase, bind an instance model to data with","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"mach = machine(model, X, y)","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"where:","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"X is any table of input features (eg, a DataFrame) whose columns have Continuous scitype; check column scitypes with schema(X)\ny is the target, which can be any AbstractVector whose element scitype is Continuous; check the scitype with scitype(y)","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"Train the machine using fit!(mach, rows=...).","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/#Hyperparameters","page":"LinearRegressor","title":"Hyperparameters","text":"","category":"section"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"fit_intercept::Bool: whether to fit the intercept or not. Default: true\nsolver::Union{Nothing, MLJLinearModels.Solver}: \"any instance of MLJLinearModels.Analytical. Use Analytical() for Cholesky and CG()=Analytical(iterative=true) for conjugate-gradient.\nIf solver = nothing (default) then Analytical() is used. Default: nothing","category":"page"},{"location":"models/LinearRegressor_MLJLinearModels/#Example","page":"LinearRegressor","title":"Example","text":"","category":"section"},{"location":"models/LinearRegressor_MLJLinearModels/","page":"LinearRegressor","title":"LinearRegressor","text":"using MLJ\nX, y = make_regression()\nmach = fit!(machine(LinearRegressor(), X, y))\npredict(mach, X)\nfitted_params(mach)","category":"page"}] +} diff --git a/v0.20.3/simple_user_defined_models/index.html b/v0.20.3/simple_user_defined_models/index.html new file mode 100644 index 000000000..c01e60ac8 --- /dev/null +++ b/v0.20.3/simple_user_defined_models/index.html @@ -0,0 +1,57 @@ + +Simple User Defined Models · MLJ

      Simple User Defined Models

      To quickly implement a new supervised model in MLJ, it suffices to:

      • Define a mutable struct to store hyperparameters. This is either a subtype of Probabilistic or Deterministic, depending on whether probabilistic or ordinary point predictions are intended. This struct is the model.

      • Define a fit method, dispatched on the model, returning learned parameters, also known as the fitresult.

      • Define a predict method, dispatched on the model and the fitresult, to return predictions on new patterns.

      In the examples below, the training input X of fit, and the new input Xnew passed to predict, are tables. Each training target y is an AbstractVector.

      The predictions returned by predict have the same form as y for deterministic models, but are Vectors of distributions for probabilistic models.

      Advanced model functionality not addressed here includes: (i) an optional update method to avoid redundant calculations when calling fit! on machines a second time; (ii) reporting extra training-related statistics; (iii) exposing model-specific functionality; (iv) checking the scientific type of data passed to your model in machine construction; and (v) checking the validity of hyperparameter values. All this is described in Adding Models for General Use.

      For an unsupervised model, implement transform and, optionally, inverse_transform using the same signature as predict below.
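
      For instance, here is a minimal sketch of such an unsupervised transformer (the type MyRescaler and the particular transformation are hypothetical, chosen only to illustrate the required signatures):

      import MLJBase
      +
      +struct MyRescaler <: MLJBase.Unsupervised
      +end
      +
      +# `fit` learns a single scale factor from the training table
      +# (this sketch assumes all columns are Continuous):
      +function MLJBase.fit(::MyRescaler, verbosity, X)
      +    x = MLJBase.matrix(X)
      +    fitresult = maximum(abs, x)    # the learned parameter
      +    return fitresult, nothing, nothing
      +end
      +
      +# transform/inverse_transform use the same signature as predict below:
      +MLJBase.transform(::MyRescaler, fitresult, Xnew) =
      +    MLJBase.table(MLJBase.matrix(Xnew) ./ fitresult)
      +MLJBase.inverse_transform(::MyRescaler, fitresult, W) =
      +    MLJBase.table(MLJBase.matrix(W) .* fitresult)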

      A simple deterministic regressor

      Here's a quick-and-dirty implementation of a ridge regressor with no intercept:

      import MLJBase
      +using LinearAlgebra
      +
      +mutable struct MyRegressor <: MLJBase.Deterministic
      +    lambda::Float64
      +end
      +MyRegressor(; lambda=0.1) = MyRegressor(lambda)
      +
      +# fit returns coefficients minimizing a penalized rms loss function:
      +function MLJBase.fit(model::MyRegressor, verbosity, X, y)
      +    x = MLJBase.matrix(X)                     # convert table to matrix
      +    fitresult = (x'x + model.lambda*I)\(x'y)  # the coefficients
      +    cache=nothing
      +    report=nothing
      +    return fitresult, cache, report
      +end
      +
      +# predict uses coefficients to make a new prediction:
      +MLJBase.predict(::MyRegressor, fitresult, Xnew) = MLJBase.matrix(Xnew) * fitresult

      After loading this code, all MLJ's basic meta-algorithms can be applied to MyRegressor:

      julia> X, y = @load_boston;
      julia> model = MyRegressor(lambda=1.0)
      +MyRegressor(
      +  lambda = 1.0)
      julia> regressor = machine(model, X, y)
      +untrained Machine; caches model-specific representations of data
      +  model: MyRegressor(lambda = 1.0)
      +  args:
      +    1: Source @715 ⏎ Table{AbstractVector{Continuous}}
      +    2: Source @922 ⏎ AbstractVector{Continuous}
      julia> evaluate!(regressor, resampling=CV(), measure=rms, verbosity=0)
      +PerformanceEvaluation object with these fields:
      +  model, measure, operation, measurement, per_fold,
      +  per_observation, fitted_params_per_fold,
      +  report_per_fold, train_test_rows, resampling, repeats
      +Extract:
      +┌────────────────────────┬───────────┬─────────────┬─────────┬──────────────────
      +│ measure                │ operation │ measurement │ 1.96*SE │ per_fold        ⋯
      +├────────────────────────┼───────────┼─────────────┼─────────┼──────────────────
      +│ RootMeanSquaredError() │ predict   │ 5.94        │ 2.58    │ [2.71, 4.44, 5. ⋯
      +└────────────────────────┴───────────┴─────────────┴─────────┴──────────────────
      +                                                                 1 column omitted

      A simple probabilistic classifier

      The following probabilistic model simply fits a probability distribution to the MultiClass training target (i.e., ignores X) and returns this pdf for any new pattern:

      import MLJBase
      +import Distributions
      +
      +struct MyClassifier <: MLJBase.Probabilistic
      +end
      +
      +# `fit` ignores the inputs X and returns the training target y
      +# probability distribution:
      +function MLJBase.fit(model::MyClassifier, verbosity, X, y)
      +    fitresult = Distributions.fit(MLJBase.UnivariateFinite, y)
      +    cache = nothing
      +    report = nothing
      +    return fitresult, cache, report
      +end
      +
      +# `predict` returns the passed fitresult (pdf) for all new patterns:
      +MLJBase.predict(model::MyClassifier, fitresult, Xnew) =
      +    [fitresult for r in 1:nrows(Xnew)]
      julia> X, y = @load_iris
      +julia> mach = fit!(machine(MyClassifier(), X, y))
      +julia> predict(mach, selectrows(X, 1:2))
      +2-element Array{UnivariateFinite{String,UInt32,Float64},1}:
      + UnivariateFinite(setosa=>0.333, versicolor=>0.333, virginica=>0.333)
      + UnivariateFinite(setosa=>0.333, versicolor=>0.333, virginica=>0.333)
      diff --git a/v0.20.3/siteinfo.js b/v0.20.3/siteinfo.js new file mode 100644 index 000000000..5da3267ac --- /dev/null +++ b/v0.20.3/siteinfo.js @@ -0,0 +1 @@ +var DOCUMENTER_CURRENT_VERSION = "v0.20.3"; diff --git a/v0.20.3/target_transformations/index.html b/v0.20.3/target_transformations/index.html new file mode 100644 index 000000000..7430d204c --- /dev/null +++ b/v0.20.3/target_transformations/index.html @@ -0,0 +1,63 @@ + +Target Transformations · MLJ

      Target Transformations

      Some supervised models work best if the target variable has been standardized, i.e., rescaled to have zero mean and unit variance. Such a target transformation is learned from the values of the training target variable. In particular, one generally learns a different transformation when training on a proper subset of the training data. Good data hygiene prescribes that a new transformation should be computed each time the supervised model is trained on new data - for example in cross-validation.

      Additionally, one generally wants to inverse transform the predictions of the supervised model for the final target predictions to be on the original scale.

      All these concerns are addressed by wrapping the supervised model using TransformedTargetModel:

      Ridge = @load RidgeRegressor pkg=MLJLinearModels verbosity=0
      +ridge = Ridge(fit_intercept=false)
      +ridge2 = TransformedTargetModel(ridge, transformer=Standardizer())
      TransformedTargetModelDeterministic(
      +  model = RidgeRegressor(
      +        lambda = 1.0, 
      +        fit_intercept = false, 
      +        penalize_intercept = false, 
      +        scale_penalty_with_samples = true, 
      +        solver = nothing), 
      +  transformer = Standardizer(
      +        features = Symbol[], 
      +        ignore = false, 
      +        ordered_factor = false, 
      +        count = false), 
      +  inverse = nothing, 
      +  cache = true)

      Note that all the original hyperparameters, as well as those of the Standardizer, are accessible as nested hyper-parameters of the wrapped model, which can be trained or evaluated like any other:

      X, y = make_regression(rng=1234, intercept=false)
      +y = y*10^5
      +mach = machine(ridge2, X, y)
      +fit!(mach, rows=1:60, verbosity=0)
      +predict(mach, rows=61:62)
      2-element Vector{Float64}:
      +  -22108.94221844114
      + -158721.15783508556

      Training and predicting using ridge2 as above means the following (a manual sketch of these steps appears after the list):

      1. Standardizing the target y using the first 60 rows to get a new target z

      2. Training the original ridge model using the first 60 rows of X and z

      3. Calling predict on the machine trained in Step 2 on rows 61:62 of X

      4. Applying the inverse scaling learned in Step 1 to those predictions (to get the final output shown above)
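
      For illustration only, here is a rough manual equivalent of those four steps, using machines directly (ridge2 handles all of this automatically; the machine names t and m are hypothetical):

      t = machine(Standardizer(), y)       # Step 1: learn the target scaling ...
      +fit!(t, rows=1:60, verbosity=0)
      +z = transform(t, y)                  # ... and standardize the target
      +m = machine(ridge, X, z)             # Step 2: train ridge on the standardized target
      +fit!(m, rows=1:60, verbosity=0)
      +zhat = predict(m, rows=61:62)        # Step 3: predict, on the standardized scale
      +yhat = inverse_transform(t, zhat)    # Step 4: map predictions back to the original scale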

      Since both ridge and ridge2 return predictions on the original scale, we can meaningfully compare the corresponding mean absolute errors, which are indeed different in this case.

      evaluate(ridge, X, y, measure=l1)
      PerformanceEvaluation object with these fields:
      +  model, measure, operation, measurement, per_fold,
      +  per_observation, fitted_params_per_fold,
      +  report_per_fold, train_test_rows, resampling, repeats
      +Extract:
      +┌──────────┬───────────┬─────────────┬─────────┬────────────────────────────────
      +│ measure  │ operation │ measurement │ 1.96*SE │ per_fold                      ⋯
      +├──────────┼───────────┼─────────────┼─────────┼────────────────────────────────
      +│ LPLoss(  │ predict   │ 81700.0     │ 20600.0 │ [67400.0, 74300.0, 112000.0,  ⋯
      +│   p = 1) │           │             │         │                               ⋯
      +└──────────┴───────────┴─────────────┴─────────┴────────────────────────────────
      +                                                                1 column omitted
      +
      evaluate(ridge2, X, y, measure=l1)
      PerformanceEvaluation object with these fields:
      +  model, measure, operation, measurement, per_fold,
      +  per_observation, fitted_params_per_fold,
      +  report_per_fold, train_test_rows, resampling, repeats
      +Extract:
      +┌──────────┬───────────┬─────────────┬─────────┬────────────────────────────────
      +│ measure  │ operation │ measurement │ 1.96*SE │ per_fold                      ⋯
      +├──────────┼───────────┼─────────────┼─────────┼────────────────────────────────
      +│ LPLoss(  │ predict   │ 83200.0     │ 19600.0 │ [81300.0, 74400.0, 112000.0,  ⋯
      +│   p = 1) │           │             │         │                               ⋯
      +└──────────┴───────────┴─────────────┴─────────┴────────────────────────────────
      +                                                                1 column omitted
      +

      Ordinary functions can also be used in target transformations but an inverse must be explicitly specified:

      ridge3 = TransformedTargetModel(ridge, transformer=y->log.(y), inverse=z->exp.(z))
      +X, y = @load_boston
      +evaluate(ridge3, X, y, measure=l1)
      PerformanceEvaluation object with these fields:
      +  model, measure, operation, measurement, per_fold,
      +  per_observation, fitted_params_per_fold,
      +  report_per_fold, train_test_rows, resampling, repeats
      +Extract:
      +┌──────────┬───────────┬─────────────┬─────────┬────────────────────────────────
      +│ measure  │ operation │ measurement │ 1.96*SE │ per_fold                      ⋯
      +├──────────┼───────────┼─────────────┼─────────┼────────────────────────────────
      +│ LPLoss(  │ predict   │ 6.33        │ 1.02    │ [5.33, 6.05, 7.38, 6.39, 7.93 ⋯
      +│   p = 1) │           │             │         │                               ⋯
      +└──────────┴───────────┴─────────────┴─────────┴────────────────────────────────
      +                                                                1 column omitted
      +

      Without the log transform (ie, using ridge) we get the poorer mean absolute error, l1, of 3.9.

      MLJBase.TransformedTargetModelFunction
      TransformedTargetModel(model; transformer=nothing, inverse=nothing, cache=true)

      Wrap the supervised or semi-supervised model in a transformation of the target variable.

      Here transformer is one of the following:

      • The Unsupervised model that is to transform the training target. By default (inverse=nothing) the parameters learned by this transformer are also used to inverse-transform the predictions of model, which means transformer must implement the inverse_transform method. If this is not the case, specify inverse=identity to suppress inversion.

      • A callable object for transforming the target, such as y -> log.(y). In this case a callable inverse, such as z -> exp.(z), should be specified.

      Specify cache=false to prioritize memory over speed, or to guarantee data anonymity.

      Specify inverse=identity if model is a probabilistic predictor, as inverse-transforming sample spaces is not supported. Alternatively, replace model with a deterministic model, such as Pipeline(model, y -> mode.(y)).

      Examples

      A model that normalizes the target before applying ridge regression, with predictions returned on the original scale:

      @load RidgeRegressor pkg=MLJLinearModels
      +model = RidgeRegressor()
      +tmodel = TransformedTargetModel(model, transformer=Standardizer())

      A model that applies a static log transformation to the data, again returning predictions to the original scale:

      tmodel2 = TransformedTargetModel(model, transformer=y->log.(y), inverse=z->exp.(z))
      source
      diff --git a/v0.20.3/third_party_packages/index.html b/v0.20.3/third_party_packages/index.html new file mode 100644 index 000000000..8408843f8 --- /dev/null +++ b/v0.20.3/third_party_packages/index.html @@ -0,0 +1,2 @@ + +Third Party Packages · MLJ

      Third Party Packages

      A list of third-party packages that integrate with MLJ.

      Last updated December 2020.

      Pull requests to update this list are very welcome. Otherwise, you may post an issue requesting this here.

      Packages providing models in the MLJ model registry

      See List of Supported Models

      Providing unregistered models:

      Packages providing other kinds of functionality:

      diff --git a/v0.20.3/transformers/index.html b/v0.20.3/transformers/index.html new file mode 100644 index 000000000..bf020d72d --- /dev/null +++ b/v0.20.3/transformers/index.html @@ -0,0 +1,426 @@ + +Transformers and Other Unsupervised models · MLJ

      Transformers and Other Unsupervised Models

      Several unsupervised models used to perform common transformations, such as one-hot encoding, are available in MLJ out-of-the-box. These are detailed in Built-in transformers below.

      A transformer is static if it has no learned parameters. While such a transformer is tantamount to an ordinary function, realizing it as an MLJ static transformer (a subtype of Static <: Unsupervised) can be useful, especially if the function depends on parameters the user would like to manipulate (which become hyper-parameters of the model). The necessary syntax for defining your own static transformers is described in Static transformers below.

      Some unsupervised models, such as clustering algorithms, have a predict method in addition to a transform method. We give an example of this in Transformers that also predict.

      Finally, we note that models that fit a distribution, or more generally a sampler object, to some data, which are sometimes viewed as unsupervised, are treated in MLJ as supervised models. See Models that learn a probability distribution for an example.

      Built-in transformers

      MLJModels.StandardizerType
      Standardizer

      A model type for constructing a standardizer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      Standardizer = @load Standardizer pkg=MLJModels

      Do model = Standardizer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in Standardizer(features=...).

      Use this model to standardize (whiten) a Continuous vector, or relevant columns of a table. The rescalings applied by this transformer to new data are always those learned during the training phase, which are generally different from what would actually standardize the new data.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any Tables.jl compatible table or any abstract vector with Continuous element scitype (any abstract float vector). Only features in a table with Continuous scitype can be standardized; check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • features: one of the following, with the behavior indicated below:

        • [] (empty, the default): standardize all features (columns) having Continuous element scitype

        • non-empty vector of feature names (symbols): standardize only the Continuous features in the vector (if ignore=false) or Continuous features not named in the vector (ignore=true).

        • function or other callable: standardize a feature if the callable returns true on its name. For example, Standardizer(features = name -> name in [:x1, :x3], ignore = true, count=true) has the same effect as Standardizer(features = [:x1, :x3], ignore = true, count=true), namely to standardize all Continuous and Count features, with the exception of :x1 and :x3.

        Note this behavior is further modified if the ordered_factor or count flags are set to true; see below

      • ignore=false: whether to ignore or standardize specified features, as explained above

      • ordered_factor=false: if true, standardize any OrderedFactor feature wherever a Continuous feature would be standardized, as described above

      • count=false: if true, standardize any Count feature wherever a Continuous feature would be standardized, as described above

      Operations

      • transform(mach, Xnew): return Xnew with relevant features standardized according to the rescalings learned during fitting of mach.

      • inverse_transform(mach, Z): apply the inverse transformation to Z, so that inverse_transform(mach, transform(mach, Xnew)) is approximately the same as Xnew; unavailable if ordered_factor or count flags were set to true.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features_fit - the names of features that will be standardized

      • means - the corresponding untransformed mean values

      • stds - the corresponding untransformed standard deviations

      Report

      The fields of report(mach) are:

      • features_fit: the names of features that will be standardized

      Examples

      using MLJ
      +
      +X = (ordinal1 = [1, 2, 3],
      +     ordinal2 = coerce([:x, :y, :x], OrderedFactor),
      +     ordinal3 = [10.0, 20.0, 30.0],
      +     ordinal4 = [-20.0, -30.0, -40.0],
      +     nominal = coerce(["Your father", "he", "is"], Multiclass));
      +
      +julia> schema(X)
      +┌──────────┬──────────────────┐
      +│ names    │ scitypes         │
      +├──────────┼──────────────────┤
      +│ ordinal1 │ Count            │
      +│ ordinal2 │ OrderedFactor{2} │
      +│ ordinal3 │ Continuous       │
      +│ ordinal4 │ Continuous       │
      +│ nominal  │ Multiclass{3}    │
      +└──────────┴──────────────────┘
      +
      +stand1 = Standardizer();
      +
      +julia> transform(fit!(machine(stand1, X)), X)
      +(ordinal1 = [1, 2, 3],
      + ordinal2 = CategoricalValue{Symbol,UInt32}[:x, :y, :x],
      + ordinal3 = [-1.0, 0.0, 1.0],
      + ordinal4 = [1.0, 0.0, -1.0],
      + nominal = CategoricalValue{String,UInt32}["Your father", "he", "is"],)
      +
      +stand2 = Standardizer(features=[:ordinal3, ], ignore=true, count=true);
      +
      +julia> transform(fit!(machine(stand2, X)), X)
      +(ordinal1 = [-1.0, 0.0, 1.0],
      + ordinal2 = CategoricalValue{Symbol,UInt32}[:x, :y, :x],
      + ordinal3 = [10.0, 20.0, 30.0],
      + ordinal4 = [1.0, 0.0, -1.0],
      + nominal = CategoricalValue{String,UInt32}["Your father", "he", "is"],)

      See also OneHotEncoder, ContinuousEncoder.

      source
      MLJModels.OneHotEncoderType
      OneHotEncoder

      A model type for constructing a one-hot encoder, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      OneHotEncoder = @load OneHotEncoder pkg=MLJModels

      Do model = OneHotEncoder() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in OneHotEncoder(features=...).

      Use this model to one-hot encode the Multiclass and OrderedFactor features (columns) of some table, leaving other columns unchanged.

      New data to be transformed may lack features present in the fit data, but no new features can be present.

      Warning: This transformer assumes that levels(col) for any Multiclass or OrderedFactor column, col, is the same for training data and new data to be transformed.
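
      If new data might lack some of the training levels, one possible remedy (a sketch only; the column name grade and the data are hypothetical) is to enforce the training levels on the new column before transforming:

      using CategoricalArrays
      +
      +X    = (grade = categorical(["A", "B", "A", "C"], ordered=true),)
      +Xnew = (grade = categorical(["A", "B"], ordered=true),)   # level "C" not observed
      +
      +# give the new column exactly the levels seen in training:
      +levels!(Xnew.grade, levels(X.grade))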

      To ensure all features are transformed into Continuous features, or dropped, use ContinuousEncoder instead.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any Tables.jl compatible table. Columns can be of mixed type but only those with element scitype Multiclass or OrderedFactor can be encoded. Check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • features: a vector of symbols (column names). If empty (default) then all Multiclass and OrderedFactor features are encoded. Otherwise, encoding is further restricted to the specified features (ignore=false) or the unspecified features (ignore=true). This default behavior can be modified by the ordered_factor flag.

      • ordered_factor=false: when true, OrderedFactor features are universally excluded

      • drop_last=true: whether to drop the column corresponding to the final class of encoded features. For example, a three-class feature is spawned into three new features if drop_last=false, but just two features otherwise.

      Fitted parameters

      The fields of fitted_params(mach) are:

      • all_features: names of all features encountered in training

      • fitted_levels_given_feature: dictionary of the levels associated with each feature encoded, keyed on the feature name

      • ref_name_pairs_given_feature: dictionary of pairs r => ftr (such as 0x00000001 => :grad__A) where r is a CategoricalArrays.jl reference integer representing a level, and ftr the corresponding new feature name; the dictionary is keyed on the names of features that are encoded

      Report

      The fields of report(mach) are:

      • features_to_be_encoded: names of input features to be encoded

      • new_features: names of all output features

      Example

      using MLJ
      +
      +X = (name=categorical(["Danesh", "Lee", "Mary", "John"]),
      +     grade=categorical(["A", "B", "A", "C"], ordered=true),
      +     height=[1.85, 1.67, 1.5, 1.67],
      +     n_devices=[3, 2, 4, 3])
      +
      +julia> schema(X)
      +┌───────────┬──────────────────┐
      +│ names     │ scitypes         │
      +├───────────┼──────────────────┤
      +│ name      │ Multiclass{4}    │
      +│ grade     │ OrderedFactor{3} │
      +│ height    │ Continuous       │
      +│ n_devices │ Count            │
      +└───────────┴──────────────────┘
      +
      +hot = OneHotEncoder(drop_last=true)
      +mach = fit!(machine(hot, X))
      +W = transform(mach, X)
      +
      +julia> schema(W)
      +┌──────────────┬────────────┐
      +│ names        │ scitypes   │
      +├──────────────┼────────────┤
      +│ name__Danesh │ Continuous │
      +│ name__John   │ Continuous │
      +│ name__Lee    │ Continuous │
      +│ grade__A     │ Continuous │
      +│ grade__B     │ Continuous │
      +│ height       │ Continuous │
      +│ n_devices    │ Count      │
      +└──────────────┴────────────┘

      See also ContinuousEncoder.

      source
      MLJModels.ContinuousEncoderType
      ContinuousEncoder

      A model type for constructing a continuous encoder, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      ContinuousEncoder = @load ContinuousEncoder pkg=MLJModels

      Do model = ContinuousEncoder() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in ContinuousEncoder(drop_last=...).

      Use this model to arrange all features (columns) of a table to have Continuous element scitype, by applying the following protocol to each feature ftr:

      • If ftr is already Continuous retain it.

      • If ftr is Multiclass, one-hot encode it.

      • If ftr is OrderedFactor, replace it with coerce(ftr, Continuous) (vector of floating point integers), unless one_hot_ordered_factors=true is specified, in which case one-hot encode it.

      • If ftr is Count, replace it with coerce(ftr, Continuous).

      • If ftr has some other element scitype, or was not observed in fitting the encoder, drop it from the table.

      Warning: This transformer assumes that levels(col) for any Multiclass or OrderedFactor column, col, is the same for training data and new data to be transformed.

      To selectively one-hot-encode categorical features (without dropping columns) use OneHotEncoder instead.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any Tables.jl compatible table. Columns can be of mixed type but only those with element scitype Multiclass or OrderedFactor can be encoded. Check column scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • drop_last=true: whether to drop the column corresponding to the final class of one-hot encoded features. For example, a three-class feature is spawned into three new features if drop_last=false, but just two features otherwise.

      • one_hot_ordered_factors=false: whether to one-hot any feature with OrderedFactor element scitype, or to instead coerce it directly to a (single) Continuous feature using the order

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features_to_keep: names of features that will not be dropped from the table

      • one_hot_encoder: the OneHotEncoder model instance for handling the one-hot encoding

      • one_hot_encoder_fitresult: the fitted parameters of the OneHotEncoder model

      Report

      • features_to_keep: names of input features that will not be dropped from the table

      • new_features: names of all output features

      Example

      X = (name=categorical(["Danesh", "Lee", "Mary", "John"]),
      +     grade=categorical(["A", "B", "A", "C"], ordered=true),
      +     height=[1.85, 1.67, 1.5, 1.67],
      +     n_devices=[3, 2, 4, 3],
      +     comments=["the force", "be", "with you", "too"])
      +
      +julia> schema(X)
      +┌───────────┬──────────────────┐
      +│ names     │ scitypes         │
      +├───────────┼──────────────────┤
      +│ name      │ Multiclass{4}    │
      +│ grade     │ OrderedFactor{3} │
      +│ height    │ Continuous       │
      +│ n_devices │ Count            │
      +│ comments  │ Textual          │
      +└───────────┴──────────────────┘
      +
      +encoder = ContinuousEncoder(drop_last=true)
      +mach = fit!(machine(encoder, X))
      +W = transform(mach, X)
      +
      +julia> schema(W)
      +┌──────────────┬────────────┐
      +│ names        │ scitypes   │
      +├──────────────┼────────────┤
      +│ name__Danesh │ Continuous │
      +│ name__John   │ Continuous │
      +│ name__Lee    │ Continuous │
      +│ grade        │ Continuous │
      +│ height       │ Continuous │
      +│ n_devices    │ Continuous │
      +└──────────────┴────────────┘
      +
      +julia> setdiff(schema(X).names, report(mach).features_to_keep) # dropped features
      +1-element Vector{Symbol}:
      + :comments
      +

      See also OneHotEncoder

      source
      MLJModels.FillImputerType
      FillImputer

      A model type for constructing a fill imputer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      FillImputer = @load FillImputer pkg=MLJModels

      Do model = FillImputer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in FillImputer(features=...).

      Use this model to impute missing values in tabular data. A fixed "filler" value is learned from the training data, one for each column of the table.

      For imputing missing values in a vector, use UnivariateFillImputer instead.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any table of input features (eg, a DataFrame) whose columns each have element scitypes Union{Missing, T}, where T is a subtype of Continuous, Multiclass, OrderedFactor or Count. Check scitypes with schema(X).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • features: a vector of names of features (symbols) for which imputation is to be attempted; default is empty, which is interpreted as "impute all".

      • continuous_fill: function or other callable to determine value to be imputed in the case of Continuous (abstract float) data; default is to apply median after skipping missing values

      • count_fill: function or other callable to determine value to be imputed in the case of Count (integer) data; default is to apply rounded median after skipping missing values

      • finite_fill: function or other callable to determine value to be imputed in the case of Multiclass or OrderedFactor data (categorical vectors); default is to apply mode after skipping missing values

      Operations

      • transform(mach, Xnew): return Xnew with missing values imputed with the fill values learned when fitting mach

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features_seen_in_fit: the names of features (columns) encountered during training

      • univariate_transformer: the univariate model applied to determine the fillers (its fields contain the functions defining the filler computations)

      • filler_given_feature: dictionary of filler values, keyed on feature (column) names

      Examples

      using MLJ
      +imputer = FillImputer()
      +
      +X = (a = [1.0, 2.0, missing, 3.0, missing],
      +     b = coerce(["y", "n", "y", missing, "y"], Multiclass),
      +     c = [1, 1, 2, missing, 3])
      +
      +schema(X)
      +julia> schema(X)
      +┌───────┬───────────────────────────────┐
      +│ names │ scitypes                      │
      +├───────┼───────────────────────────────┤
      +│ a     │ Union{Missing, Continuous}    │
      +│ b     │ Union{Missing, Multiclass{2}} │
      +│ c     │ Union{Missing, Count}         │
      +└───────┴───────────────────────────────┘
      +
      +mach = machine(imputer, X)
      +fit!(mach)
      +
      +julia> fitted_params(mach).filler_given_feature
      +Dict{Symbol, Any} with 3 entries:
      +  :a => 2.0
      +  :b => "y"
      +  :c => 2
      +
      +julia> transform(mach, X)
      +(a = [1.0, 2.0, 2.0, 3.0, 2.0],
      + b = CategoricalValue{String, UInt32}["y", "n", "y", "y", "y"],
      + c = [1, 1, 2, 2, 3],)

      See also UnivariateFillImputer.

      source
      MLJModels.UnivariateFillImputerType
      UnivariateFillImputer

      A model type for constructing a single variable fill imputer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      UnivariateFillImputer = @load UnivariateFillImputer pkg=MLJModels

      Do model = UnivariateFillImputer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateFillImputer(continuous_fill=...).

      Use this model to impute missing values in a vector, using a fixed value learned from the non-missing values of the training vector.

      For imputing missing values in tabular data, use FillImputer instead.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, x)

      where

      • x: any abstract vector with element scitype Union{Missing, T} where T is a subtype of Continuous, Multiclass, OrderedFactor or Count; check scitype using scitype(x)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • continuous_fill: function or other callable to determine value to be imputed in the case of Continuous (abstract float) data; default is to apply median after skipping missing values

      • count_fill: function or other callable to determine value to be imputed in the case of Count (integer) data; default is to apply rounded median after skipping missing values

      • finite_fill: function or other callable to determine value to be imputed in the case of Multiclass or OrderedFactor data (categorical vectors); default is to apply mode after skipping missing values

      Operations

      • transform(mach, xnew): return xnew with missing values imputed with the fill values learned when fitting mach

      Fitted parameters

      The fields of fitted_params(mach) are:

      • filler: the fill value to be imputed in all new data

      Examples

      using MLJ
      +imputer = UnivariateFillImputer()
      +
      +x_continuous = [1.0, 2.0, missing, 3.0]
      +x_multiclass = coerce(["y", "n", "y", missing, "y"], Multiclass)
      +x_count = [1, 1, 1, 2, missing, 3, 3]
      +
      +mach = machine(imputer, x_continuous)
      +fit!(mach)
      +
      +julia> fitted_params(mach)
      +(filler = 2.0,)
      +
      +julia> transform(mach, [missing, missing, 101.0])
      +3-element Vector{Float64}:
      + 2.0
      + 2.0
      + 101.0
      +
      +mach2 = machine(imputer, x_multiclass) |> fit!
      +
      +julia> transform(mach2, x_multiclass)
      +5-element CategoricalArray{String,1,UInt32}:
      + "y"
      + "n"
      + "y"
      + "y"
      + "y"
      +
      +mach3 = machine(imputer, x_count) |> fit!
      +
      +julia> transform(mach3, [missing, missing, 5])
      +3-element Vector{Int64}:
      + 2
      + 2
      + 5

      For imputing tabular data, use FillImputer.

      source
      MLJModels.FeatureSelectorType
      FeatureSelector

      A model type for constructing a feature selector, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      FeatureSelector = @load FeatureSelector pkg=MLJModels

      Do model = FeatureSelector() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in FeatureSelector(features=...).

      Use this model to select features (columns) of a table, usually as part of a model Pipeline.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, X)

      where

      • X: any table of input features, where "table" is in the sense of Tables.jl

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • features: one of the following, with the behavior indicated:

        • [] (empty, the default): filter out all features (columns) which were not encountered in training

        • non-empty vector of feature names (symbols): keep only the specified features (ignore=false) or keep only unspecified features (ignore=true)

        • function or other callable: keep a feature if the callable returns true on its name. For example, specifying FeatureSelector(features = name -> name in [:x1, :x3], ignore = true) has the same effect as FeatureSelector(features = [:x1, :x3], ignore = true), namely to select all features, with the exception of :x1 and :x3.

      • ignore: whether to ignore or keep specified features, as explained above

      Operations

      • transform(mach, Xnew): select features from the table Xnew as specified by the model, taking features seen during training into account, if relevant

      Fitted parameters

      The fields of fitted_params(mach) are:

      • features_to_keep: the features that will be selected

      Example

      using MLJ
      +
      +X = (ordinal1 = [1, 2, 3],
      +     ordinal2 = coerce(["x", "y", "x"], OrderedFactor),
      +     ordinal3 = [10.0, 20.0, 30.0],
      +     ordinal4 = [-20.0, -30.0, -40.0],
      +     nominal = coerce(["Your father", "he", "is"], Multiclass));
      +
      +selector = FeatureSelector(features=[:ordinal3, ], ignore=true);
      +
      +julia> transform(fit!(machine(selector, X)), X)
      +(ordinal1 = [1, 2, 3],
      + ordinal2 = CategoricalValue{Symbol,UInt32}["x", "y", "x"],
      + ordinal4 = [-20.0, -30.0, -40.0],
      + nominal = CategoricalValue{String,UInt32}["Your father", "he", "is"],)
      +
      source
      MLJModels.UnivariateBoxCoxTransformerType
      UnivariateBoxCoxTransformer

      A model type for constructing a single variable Box-Cox transformer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      UnivariateBoxCoxTransformer = @load UnivariateBoxCoxTransformer pkg=MLJModels

      Do model = UnivariateBoxCoxTransformer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateBoxCoxTransformer(n=...).

      Box-Cox transformations attempt to make data look more normally distributed. This can improve performance and assist in the interpretation of models which suppose that data is generated by a normal distribution.

      A Box-Cox transformation (with shift) is of the form

      x -> ((x + c)^λ - 1)/λ

      for some constant c and real λ, unless λ = 0, in which case the above is replaced with

      x -> log(x + c)

      Given user-specified hyper-parameters n::Integer and shift::Bool, the present implementation learns the parameters c and λ from the training data as follows: If shift=true and zeros are encountered in the data, then c is set to 0.2 times the data mean. If there are no zeros, then no shift is applied. Finally, n different values of λ between -0.4 and 3 are considered, with λ fixed to the value maximizing normality of the transformed data.

      Reference: Wikipedia entry for power transform.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, x)

      where

      • x: any abstract vector with element scitype Continuous; check the scitype with scitype(x)

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • n=171: number of values of the exponent λ to try

      • shift=false: whether to include a preliminary constant translation in transformations, in the presence of zeros

      Operations

      • transform(mach, xnew): apply the Box-Cox transformation learned when fitting mach

      • inverse_transform(mach, z): reconstruct the vector x whose transformation, as learned by mach, is z

      Fitted parameters

      The fields of fitted_params(mach) are:

      • λ: the learned Box-Cox exponent

      • c: the learned shift

      Examples

      using MLJ
      +using UnicodePlots
      +using Random
      +Random.seed!(123)
      +
      +transf = UnivariateBoxCoxTransformer()
      +
      +x = randn(1000).^2
      +
      +mach = machine(transf, x)
      +fit!(mach)
      +
      +z = transform(mach, x)
      +
      +julia> histogram(x)
      +                ┌                                        ┐
      +   [ 0.0,  2.0) ┤███████████████████████████████████  848
      +   [ 2.0,  4.0) ┤████▌ 109
      +   [ 4.0,  6.0) ┤█▍ 33
      +   [ 6.0,  8.0) ┤▍ 7
      +   [ 8.0, 10.0) ┤▏ 2
      +   [10.0, 12.0) ┤  0
      +   [12.0, 14.0) ┤▏ 1
      +                └                                        ┘
      +                                 Frequency
      +
      +julia> histogram(z)
      +                ┌                                        ┐
      +   [-5.0, -4.0) ┤█▎ 8
      +   [-4.0, -3.0) ┤████████▊ 64
      +   [-3.0, -2.0) ┤█████████████████████▊ 159
      +   [-2.0, -1.0) ┤█████████████████████████████▊ 216
      +   [-1.0,  0.0) ┤███████████████████████████████████  254
      +   [ 0.0,  1.0) ┤█████████████████████████▊ 188
      +   [ 1.0,  2.0) ┤████████████▍ 90
      +   [ 2.0,  3.0) ┤██▊ 20
      +   [ 3.0,  4.0) ┤▎ 1
      +                └                                        ┘
      +                                 Frequency
      +
      source
      MLJModels.UnivariateDiscretizerType
      UnivariateDiscretizer

      A model type for constructing a single variable discretizer, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      UnivariateDiscretizer = @load UnivariateDiscretizer pkg=MLJModels

      Do model = UnivariateDiscretizer() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateDiscretizer(n_classes=...).

      Discretization converts a Continuous vector into an OrderedFactor vector. In particular, the output is a CategoricalVector (whose reference type is optimized).

      The transformation is chosen so that the vector on which the transformer is fit has, in transformed form, an approximately uniform distribution of values. Specifically, if n_classes is the level of discretization, then 2*n_classes - 1 ordered quantiles are computed, the odd quantiles being used for transforming (discretization) and the even quantiles for inverse transforming.

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, x)

      where

      • x: any abstract vector with Continuous element scitype; check scitype with scitype(x).

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • n_classes: number of discrete classes in the output

      Operations

      • transform(mach, xnew): discretize xnew according to the discretization learned when fitting mach

      • inverse_transform(mach, z): attempt to reconstruct from z a vector that transforms to give z

      Fitted parameters

      The fields of fitted_params(mach).fitresult include:

      • odd_quantiles: quantiles used for transforming (length is n_classes - 1)

      • even_quantiles: quantiles used for inverse transforming (length is n_classes)

      Example

      using MLJ
      +using Random
      +Random.seed!(123)
      +
      +discretizer = UnivariateDiscretizer(n_classes=100)
      +mach = machine(discretizer, randn(1000))
      +fit!(mach)
      +
      +julia> x = rand(5)
      +5-element Vector{Float64}:
      + 0.8585244609846809
      + 0.37541692370451396
      + 0.6767070590395461
      + 0.9208844241267105
      + 0.7064611415680901
      +
      +julia> z = transform(mach, x)
      +5-element CategoricalArrays.CategoricalArray{UInt8,1,UInt8}:
      + 0x52
      + 0x42
      + 0x4d
      + 0x54
      + 0x4e
      +
      +x_approx = inverse_transform(mach, z)
      +julia> x - x_approx
      +5-element Vector{Float64}:
      + 0.008224506144777322
      + 0.012731354778359405
      + 0.0056265330571125816
      + 0.005738175684445124
      + 0.006835652575801987
      source
      MLJModels.UnivariateTimeTypeToContinuousType
      UnivariateTimeTypeToContinuous

      A model type for constructing a single variable transformer that creates continuous representations of temporally typed data, based on MLJModels.jl, and implementing the MLJ model interface.

      From MLJ, the type can be imported using

      UnivariateTimeTypeToContinuous = @load UnivariateTimeTypeToContinuous pkg=MLJModels

      Do model = UnivariateTimeTypeToContinuous() to construct an instance with default hyper-parameters. Provide keyword arguments to override hyper-parameter defaults, as in UnivariateTimeTypeToContinuous(zero_time=...).

      Use this model to convert vectors with a TimeType element type to vectors of Float64 type (Continuous element scitype).

      Training data

      In MLJ or MLJBase, bind an instance model to data with

      mach = machine(model, x)

      where

      • x: any abstract vector whose element type is a subtype of Dates.TimeType

      Train the machine using fit!(mach, rows=...).

      Hyper-parameters

      • zero_time: the time that is to correspond to 0.0 under transformations, with the type coinciding with the training data element type. If unspecified, the earliest time encountered in training is used.

      • step::Period=Hour(24): time interval to correspond to one unit under transformation

      Operations

      • transform(mach, xnew): apply the encoding inferred when mach was fit

      Fitted parameters

      fitted_params(mach).fitresult is the tuple (zero_time, step) actually used in transformations, which may differ from the user-specified hyper-parameters.

      Example

      using MLJ
      +using Dates
      +
      +x = [Date(2001, 1, 1) + Day(i) for i in 0:4]
      +
      +encoder = UnivariateTimeTypeToContinuous(zero_time=Date(2000, 1, 1),
      +                                         step=Week(1))
      +
      +mach = machine(encoder, x)
      +fit!(mach)
      +julia> transform(mach, x)
      +5-element Vector{Float64}:
      + 52.285714285714285
      + 52.42857142857143
      + 52.57142857142857
      + 52.714285714285715
      + 52.857142857142854
      source

      Static transformers

      A static transformer is a model for transforming data that does not generalize to new data (does not "learn") but which nevertheless has hyperparameters. For example, the DBSCAN clustering model from Clustering.jl can assign labels to some collection of observations, but cannot directly assign a label to some new observation.

      The general user may define their own static models. The main use-case is the insertion of some parameter-dependent transformation into a Linear Pipeline. (If a static transformer has no hyper-parameters, it is tantamount to an ordinary function. An ordinary function can be inserted directly into a pipeline; the situation for learning networks is only slightly more complicated.)

      The following example defines a new model type Averager to perform the weighted average of two vectors (target predictions, for example). We suppose the weighting is normalized, and therefore controlled by a single hyper-parameter, mix.

      mutable struct Averager <: Static
      +    mix::Float64
      +end
      +
      +MLJ.transform(a::Averager, _, y1, y2) = (1 - a.mix)*y1 + a.mix*y2

      Important. Note the sub-typing <: Static.

      Such static transformers with (unlearned) parameters can have arbitrarily many inputs, but only one output. In the single input case, an inverse_transform can also be defined. Since they have no real learned parameters, you bind a static transformer to a machine without specifying training arguments; there is no need to fit! the machine:

      mach = machine(Averager(0.5))
      +transform(mach, [1, 2, 3], [3, 2, 1])
      3-element Vector{Float64}:
      + 2.0
      + 2.0
      + 2.0
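
      As noted above, a single-input static transformer may also implement an inverse_transform. Here is a minimal sketch (the Rescaler type is hypothetical):

      mutable struct Rescaler <: Static
      +    factor::Float64
      +end
      +
      +MLJ.transform(r::Rescaler, _, x) = r.factor .* x
      +MLJ.inverse_transform(r::Rescaler, _, z) = z ./ r.factor
      +
      +mach = machine(Rescaler(2.0))
      +z = transform(mach, [1.0, 2.0, 3.0])   # [2.0, 4.0, 6.0]
      +inverse_transform(mach, z)             # recovers [1.0, 2.0, 3.0]

      Like Averager above, such a transformer can be inserted into pipelines or learning networks wherever a parameter-dependent transformation is needed.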

      Let's see how we can include our Averager in a learning network to mix the predictions of two regressors, with one-hot encoding of the inputs. Here are two regressors for mixing, and some dummy data for testing our learning network:

      ridge = (@load RidgeRegressor pkg=MultivariateStats)()
      +knn = (@load KNNRegressor)()
      +
      +import Random.seed!
      +seed!(112)
      +X = (
      +    x1=coerce(rand("ab", 100), Multiclass),
      +    x2=rand(100),
      +)
      +y = X.x2 + 0.05*rand(100)
      +schema(X)
      ┌───────┬───────────────┬────────────────────────────────┐
      +│ names │ scitypes      │ types                          │
      +├───────┼───────────────┼────────────────────────────────┤
      +│ x1    │ Multiclass{2} │ CategoricalValue{Char, UInt32} │
      +│ x2    │ Continuous    │ Float64                        │
      +└───────┴───────────────┴────────────────────────────────┘
      +

      And the learning network:

      Xs = source(X)
      +ys = source(y)
      +
      +averager = Averager(0.5)
      +
      +mach0 = machine(OneHotEncoder(), Xs)
      +W = transform(mach0, Xs) # one-hot encode the input
      +
      +mach1 = machine(ridge, W, ys)
      +y1 = predict(mach1, W)
      +
      +mach2 = machine(knn, W, ys)
      +y2 = predict(mach2, W)
      +
      +mach4= machine(averager)
      +yhat = transform(mach4, y1, y2)
      +
      +# test:
      +fit!(yhat)
      +Xnew = selectrows(X, 1:3)
      +yhat(Xnew)
      3-element Vector{Float64}:
      + 0.6403223210037916
      + 0.9607694439597683
      + 0.8159225346205365

      We next "export" the learning network as a standalone composite model type. First we need a struct for the composite model. Since we are restricting to Deterministic component regressors, the composite will also make deterministic predictions, and so gets the supertype DeterministicNetworkComposite:

      mutable struct DoubleRegressor <: DeterministicNetworkComposite
      +    regressor1
      +    regressor2
      +    averager
      +end

      As described in Learning Networks, we next paste the learning network into a prefit declaration, replace the component models with symbolic placeholders, and add a learning network "interface":

      import MLJBase
      +function MLJBase.prefit(composite::DoubleRegressor, verbosity, X, y)
      +    Xs = source(X)
      +    ys = source(y)
      +
      +    mach0 = machine(OneHotEncoder(), Xs)
      +    W = transform(mach0, Xs) # one-hot encode the input
      +
      +    mach1 = machine(:regressor1, W, ys)
      +    y1 = predict(mach1, W)
      +
      +    mach2 = machine(:regressor2, W, ys)
      +    y2 = predict(mach2, W)
      +
      +    mach4= machine(:averager)
      +    yhat = transform(mach4, y1, y2)
      +
      +    # learning network interface:
      +    (; predict=yhat)
      +end

      The new model type can be evaluated like any other supervised model:

      X, y = @load_reduced_ames;
      +composite = DoubleRegressor(ridge, knn, Averager(0.5))
      DoubleRegressor(
      +  regressor1 = RidgeRegressor(
      +        lambda = 1.0, 
      +        bias = true), 
      +  regressor2 = KNNRegressor(
      +        K = 5, 
      +        algorithm = :kdtree, 
      +        metric = Distances.Euclidean(0.0), 
      +        leafsize = 10, 
      +        reorder = true, 
      +        weights = NearestNeighborModels.Uniform()), 
      +  averager = Averager(
      +        mix = 0.5))
      composite.averager.mix = 0.25 # adjust mix from default of 0.5
      +evaluate(composite, X, y, measure=l1)
      PerformanceEvaluation object with these fields:
      +  model, measure, operation, measurement, per_fold,
      +  per_observation, fitted_params_per_fold,
      +  report_per_fold, train_test_rows, resampling, repeats
      +Extract:
      +┌──────────┬───────────┬─────────────┬─────────┬────────────────────────────────
      +│ measure  │ operation │ measurement │ 1.96*SE │ per_fold                      ⋯
      +├──────────┼───────────┼─────────────┼─────────┼────────────────────────────────
      +│ LPLoss(  │ predict   │ 17200.0     │ 1350.0  │ [15200.0, 15800.0, 18500.0, 1 ⋯
      +│   p = 1) │           │             │         │                               ⋯
      +└──────────┴───────────┴─────────────┴─────────┴────────────────────────────────
      +                                                                1 column omitted
      +

      A static transformer can also expose byproducts of the transform computation in the report of any associated machine. See Static models (models that do not generalize) for details.

      Transformers that also predict

      Some clustering algorithms learn to label data by identifying a collection of "centroids" in the training data. Any new input observation is labeled with the cluster to which it is closest (this is the output of predict), while the vector of all distances from the centroids defines a lower-dimensional representation of the observation (the output of transform). In the following example, a K-means clustering algorithm assigns one of three labels 1, 2, 3 to the input features of the iris data set, and these labels are compared with the actual species recorded in the target (which is not seen by the algorithm).

      import Random.seed!
      +seed!(123)
      +
      +X, y = @load_iris;
      +KMeans = @load KMeans pkg=ParallelKMeans
      +kmeans = KMeans()
      +mach = machine(kmeans, X) |> fit!
      +
      +# transforming:
      +Xsmall = transform(mach);
      +selectrows(Xsmall, 1:4) |> pretty
      +julia> selectrows(Xsmall, 1:4) |> pretty
      +┌─────────────────────┬────────────────────┬────────────────────┐
      +│ x1                  │ x2                 │ x3                 │
      +│ Float64             │ Float64            │ Float64            │
      +│ Continuous          │ Continuous         │ Continuous         │
      +├─────────────────────┼────────────────────┼────────────────────┤
      +│ 0.0215920000000267  │ 25.314260355029603 │ 11.645232464391299 │
      +│ 0.19199200000001326 │ 25.882721893491123 │ 11.489658693899486 │
      +│ 0.1699920000000077  │ 27.58656804733728  │ 12.674412792260142 │
      +│ 0.26919199999998966 │ 26.28656804733727  │ 11.64392098898145  │
      +└─────────────────────┴────────────────────┴────────────────────┘
      +
      +# predicting:
      +yhat = predict(mach);
      +compare = zip(yhat, y) |> collect;
      +compare[1:8]
      +8-element Array{Tuple{CategoricalValue{Int64,UInt32},CategoricalString{UInt32}},1}:
      + (1, "setosa")
      + (1, "setosa")
      + (1, "setosa")
      + (1, "setosa")
      + (1, "setosa")
      + (1, "setosa")
      + (1, "setosa")
      + (1, "setosa")
      +
      +compare[51:58]
      +8-element Array{Tuple{CategoricalValue{Int64,UInt32},CategoricalString{UInt32}},1}:
      + (2, "versicolor")
      + (3, "versicolor")
      + (2, "versicolor")
      + (3, "versicolor")
      + (3, "versicolor")
      + (3, "versicolor")
      + (3, "versicolor")
      + (3, "versicolor")
      +
      +compare[101:108]
      +8-element Array{Tuple{CategoricalValue{Int64,UInt32},CategoricalString{UInt32}},1}:
      + (2, "virginica")
      + (3, "virginica")
      + (2, "virginica")
      + (2, "virginica")
      + (2, "virginica")
      + (2, "virginica")
      + (3, "virginica")
      + (2, "virginica")
      diff --git a/v0.20.3/tuning_models/index.html b/v0.20.3/tuning_models/index.html new file mode 100644 index 000000000..98cd44d6b --- /dev/null +++ b/v0.20.3/tuning_models/index.html @@ -0,0 +1,338 @@ + +Tuning Models · MLJ

      Tuning Models

      MLJ provides several built-in and third-party options for optimizing a model's hyper-parameters. The quick-reference table below omits some advanced keyword options.

tuning strategy | notes | package to import | package providing the core algorithm
Grid(goal=nothing, resolution=10) | shuffled by default; goal is upper bound for number of grid points | MLJ.jl or MLJTuning.jl | MLJTuning.jl
RandomSearch(rng=GLOBAL_RNG) | with customizable priors | MLJ.jl or MLJTuning.jl | MLJTuning.jl
LatinHypercube(rng=GLOBAL_RNG) | with discrete parameter support | MLJ.jl or MLJTuning.jl | LatinHypercubeSampling
MLJTreeParzenTuning() | See this example for usage | TreeParzen.jl | TreeParzen.jl (port to Julia of hyperopt)
ParticleSwarm(n_particles=3, rng=GLOBAL_RNG) | Standard Kennedy-Eberhart algorithm, plus discrete parameter support | MLJParticleSwarmOptimization.jl | MLJParticleSwarmOptimization.jl
AdaptiveParticleSwarm(n_particles=3, rng=GLOBAL_RNG) | Zhan et al. variant with automated swarm coefficient updates, plus discrete parameter support | MLJParticleSwarmOptimization.jl | MLJParticleSwarmOptimization.jl
Explicit() | For an explicit list of models of varying type | MLJ.jl or MLJTuning.jl | MLJTuning.jl

      Below we illustrate hyperparameter optimization using the Grid, RandomSearch, LatinHypercube and Explicit tuning strategies.

      Overview

      In MLJ model tuning is implemented as a model wrapper. After wrapping a model in a tuning strategy and binding the wrapped model to data in a machine called mach, calling fit!(mach) instigates a search for optimal model hyperparameters, within a specified range, and then uses all supplied data to train the best model. To predict using that model, one then calls predict(mach, Xnew). In this way, the wrapped model may be viewed as a "self-tuning" version of the unwrapped model. That is, wrapping the model simply transforms certain hyper-parameters into learned parameters.

      A corollary of the tuning-as-wrapper approach is that the evaluation of the performance of a TunedModel instance using evaluate! implies nested resampling. This approach is inspired by MLR. See also below.

      In MLJ, tuning is an iterative procedure, with an iteration parameter n, the total number of model instances to be evaluated. Accordingly, tuning can be controlled using MLJ's IteratedModel wrapper. After familiarizing oneself with the TunedModel wrapper described below, see Controlling model tuning for more on this advanced feature.

      For a more in-depth overview of tuning in MLJ, or for implementation details, see the MLJTuning documentation. For a complete list of options see the TunedModel doc-string below.

      Tuning a single hyperparameter using a grid search (regression example)

      using MLJ
      +X = MLJ.table(rand(100, 10));
      +y = 2X.x1 - X.x2 + 0.05*rand(100);
      +Tree = @load DecisionTreeRegressor pkg=DecisionTree verbosity=0;
      +tree = Tree()
      DecisionTreeRegressor(
      +  max_depth = -1, 
      +  min_samples_leaf = 5, 
      +  min_samples_split = 2, 
      +  min_purity_increase = 0.0, 
      +  n_subfeatures = 0, 
      +  post_prune = false, 
      +  merge_purity_threshold = 1.0, 
      +  feature_importance = :impurity, 
      +  rng = Random._GLOBAL_RNG())

      Let's tune min_purity_increase in the model above, using a grid-search. To do so we will use the simplest range object, a one-dimensional range object constructed using the range method:

      r = range(tree, :min_purity_increase, lower=0.001, upper=1.0, scale=:log);
      +self_tuning_tree = TunedModel(
      +    model=tree,
      +    resampling=CV(nfolds=3),
      +    tuning=Grid(resolution=10),
      +    range=r,
      +    measure=rms
      +);
      DeterministicTunedModel(
      +  model = DecisionTreeRegressor(
      +        max_depth = -1, 
      +        min_samples_leaf = 5, 
      +        min_samples_split = 2, 
      +        min_purity_increase = 0.0, 
      +        n_subfeatures = 0, 
      +        post_prune = false, 
      +        merge_purity_threshold = 1.0, 
      +        feature_importance = :impurity, 
      +        rng = Random._GLOBAL_RNG()), 
      +  tuning = Grid(
      +        goal = nothing, 
      +        resolution = 10, 
      +        shuffle = true, 
      +        rng = Random._GLOBAL_RNG()), 
      +  resampling = CV(
      +        nfolds = 3, 
      +        shuffle = false, 
      +        rng = Random._GLOBAL_RNG()), 
      +  measure = RootMeanSquaredError(), 
      +  weights = nothing, 
      +  class_weights = nothing, 
      +  operation = nothing, 
      +  range = NumericRange(0.001 ≤ min_purity_increase ≤ 1.0; origin=0.5005, unit=0.4995; on log scale), 
      +  selection_heuristic = MLJTuning.NaiveSelection(nothing), 
      +  train_best = true, 
      +  repeats = 1, 
      +  n = nothing, 
      +  acceleration = CPU1{Nothing}(nothing), 
      +  acceleration_resampling = CPU1{Nothing}(nothing), 
      +  check_measure = true, 
      +  cache = true)

      Incidentally, a grid is generated internally "over the range" by calling the iterator method with an appropriate resolution:

      iterator(r, 5)
      5-element Vector{Float64}:
      + 0.0010000000000000002
      + 0.005623413251903492
      + 0.0316227766016838
      + 0.1778279410038923
      + 1.0

      Non-numeric hyperparameters are handled a little differently:

      selector = FeatureSelector();
      +r2 = range(selector, :features, values = [[:x1,], [:x1, :x2]]);
      +iterator(r2)
      2-element Vector{Vector{Symbol}}:
      + [:x1]
      + [:x1, :x2]

      Unbounded ranges are also permitted. See the range and iterator docstrings below for details, and the sampler docstring for generating random samples from one-dimensional ranges (used internally by the RandomSearch strategy).
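
For instance, reusing the bounded range r defined above, random samples can be drawn directly. The following is a minimal sketch; the choice of a Uniform distribution is ours, made only to illustrate sampler (note that sampling takes no account of the :log scale of r):

import Distributions
import Random
s = sampler(r, Distributions.Uniform)  # fit a Uniform distribution to the bounded range r
rand(Random.MersenneTwister(1), s, 3)  # three random values from [0.001, 1.0]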

      Returning to the wrapped tree model:

      mach = machine(self_tuning_tree, X, y);
      +fit!(mach, verbosity=0)
      trained Machine; does not cache data
      +  model: DeterministicTunedModel(model = DecisionTreeRegressor(max_depth = -1, …), …)
      +  args: 
      +    1:	Source @026 ⏎ Table{AbstractVector{Continuous}}
      +    2:	Source @744 ⏎ AbstractVector{Continuous}
      +

      We can inspect the detailed results of the grid search with report(mach) or just retrieve the optimal model, as here:

      fitted_params(mach).best_model
      DecisionTreeRegressor(
      +  max_depth = -1, 
      +  min_samples_leaf = 5, 
      +  min_samples_split = 2, 
      +  min_purity_increase = 0.0021544346900318843, 
      +  n_subfeatures = 0, 
      +  post_prune = false, 
      +  merge_purity_threshold = 1.0, 
      +  feature_importance = :impurity, 
      +  rng = Random._GLOBAL_RNG())

      For more detailed information, we can look at report(mach), for example:

      entry = report(mach).best_history_entry
      (model = DecisionTreeRegressor(max_depth = -1, …),
      + measure = StatisticalMeasuresBase.RobustMeasure{StatisticalMeasuresBase.FussyMeasure{StatisticalMeasuresBase.RobustMeasure{StatisticalMeasuresBase.Multimeasure{StatisticalMeasuresBase.SupportsMissingsMeasure{StatisticalMeasures.RootMeanSquaredErrorOnScalars}, Nothing, StatisticalMeasuresBase.RootMean{Int64}, typeof(identity)}}, Nothing}}[RootMeanSquaredError()],
      + measurement = [0.24315885564675355],
      + per_fold = [[0.23497325986087308, 0.1907463204704172, 0.2930882043027645]],)

      Predicting on new input observations using the optimal model, trained on all the data bound to mach:

      Xnew  = MLJ.table(rand(3, 10));
      +predict(mach, Xnew)
      3-element Vector{Float64}:
      +  0.5876731958281213
      + -0.11262620502317926
      +  1.5001919496386296

      Or predicting on some subset of the observations bound to mach:

      test = 1:3
      +predict(mach, rows=test)
      3-element Vector{Float64}:
      + -0.0436042437015236
      +  1.1495316705361969
      +  1.1495316705361969

      For tuning using only a subset train of all observation indices, specify rows=train in the above fit! call. In that case, the above predict calls would be based on training the optimal model on all train rows.
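
Here is a minimal sketch of that pattern; the names train and test are our own, and partition is used to generate the index sets:

train, test = partition(eachindex(y), 0.7, shuffle=true, rng=1234);
mach = machine(self_tuning_tree, X, y);
fit!(mach, rows=train, verbosity=0)   # tuning and final training restricted to the train rows
predict(mach, rows=test)              # predictions of the optimal model, trained on the train rows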

      A probabilistic classifier example

      Tuning a classifier is not essentially different from tuning a regressor. A common gotcha however is to overlook the distinction between supervised models that make point predictions (subtypes of Deterministic) and those that make probabilistic predictions (subtypes of Probabilistic). The DecisionTreeRegressor model in the preceding illustration was deterministic, so this example will consider a probabilistic classifier:

      info("KNNClassifier").prediction_type
      :probabilistic
      X, y = @load_iris
      +KNN = @load KNNClassifier verbosity=0
      +knn = KNN()
      KNNClassifier(
      +  K = 5, 
      +  algorithm = :kdtree, 
      +  metric = Distances.Euclidean(0.0), 
      +  leafsize = 10, 
      +  reorder = true, 
      +  weights = NearestNeighborModels.Uniform())

      We'll tune the hyperparameter K in the model above, using a grid-search once more:

      K_range = range(knn, :K, lower=5, upper=20);
      NumericRange(5 ≤ K ≤ 20; origin=12.5, unit=7.5)

Since the model is probabilistic, we can choose either: (i) a probabilistic measure, such as brier_loss; or (ii) a deterministic measure, such as misclassification_rate (in which case predict_mode is called instead of predict under the hood).

      Case (i) - probabilistic measure:

      self_tuning_knn = TunedModel(
      +    model=knn,
      +    resampling = CV(nfolds=4, rng=1234),
      +    tuning = Grid(resolution=5),
      +    range = K_range,
      +    measure=BrierLoss()
      +);
      +
      +mach = machine(self_tuning_knn, X, y);
      +fit!(mach, verbosity=0);
      trained Machine; does not cache data
      +  model: ProbabilisticTunedModel(model = KNNClassifier(K = 5, …), …)
      +  args: 
      +    1:	Source @021 ⏎ Table{AbstractVector{Continuous}}
      +    2:	Source @788 ⏎ AbstractVector{Multiclass{3}}
      +

      Case (ii) - deterministic measure:

      self_tuning_knn = TunedModel(
      +    model=knn,
      +    resampling = CV(nfolds=4, rng=1234),
      +    tuning = Grid(resolution=5),
      +    range = K_range,
      +    measure=MisclassificationRate()
      +)
      +
      +mach = machine(self_tuning_knn, X, y);
      +fit!(mach, verbosity=0);
      trained Machine; does not cache data
      +  model: ProbabilisticTunedModel(model = KNNClassifier(K = 5, …), …)
      +  args: 
      +    1:	Source @020 ⏎ Table{AbstractVector{Continuous}}
      +    2:	Source @333 ⏎ AbstractVector{Multiclass{3}}
      +

      Let's inspect the best model and corresponding evaluation of the metric in case (ii):

      entry = report(mach).best_history_entry
      (model = KNNClassifier(K = 5, …),
      + measure = StatisticalMeasuresBase.RobustMeasure{StatisticalMeasuresBase.FussyMeasure{StatisticalMeasuresBase.RobustMeasure{StatisticalMeasuresBase.Multimeasure{StatisticalMeasuresBase.SupportsMissingsMeasure{StatisticalMeasures.MisclassificationRateOnScalars}, Nothing, StatisticalMeasuresBase.Mean, typeof(identity)}}, Nothing}}[MisclassificationRate()],
      + measurement = [0.02666666666666667],
      + per_fold = [[0.0, 0.02631578947368421, 0.0, 0.08108108108108109]],)
      entry.model.K
      5

      Recall that fitting mach also retrains the optimal model on all available data. The following is therefore an optimal model prediction based on all available data:

      predict(mach, rows=148:150)
      3-element UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:
      + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)
      + UnivariateFinite{Multiclass{3}}(setosa=>0.0, versicolor=>0.0, virginica=>1.0)

      Specifying a custom measure

Users may specify a custom loss or scoring function, so long as it complies with the StatisticalMeasuresBase.jl API and implements the appropriate orientation trait (Score() or Loss()) from that package. For example, suppose we define a "new" scoring function custom_accuracy by

      custom_accuracy(yhat, y) = mean(y .== yhat); # yhat - prediction, y - ground truth
      custom_accuracy (generic function with 1 method)

      In tuning, scores are maximised, while losses are minimised. So here we declare

      import StatisticalMeasuresBase as SMB
      +SMB.orientation(::typeof(custom_accuracy)) = SMB.Score()

      For full details on constructing custom measures, see StatisticalMeasuresBase.jl.

      self_tuning_knn = TunedModel(
      +    model=knn,
      +    resampling = CV(nfolds=4),
      +    tuning = Grid(resolution=5),
      +    range = K_range,
      +    measure = [custom_accuracy, MulticlassFScore()],
      +    operation = predict_mode
      +);
      +
      +mach = machine(self_tuning_knn, X, y)
      +fit!(mach, verbosity=0)
      +entry = report(mach).best_history_entry
      (model = KNNClassifier(K = 5, …),
      + measure = StatisticalMeasuresBase.RobustMeasure[Main.custom_accuracy, MulticlassFScore(beta = 1.0, …)],
      + measurement = [0.8866666666666667, 0.4660896415303711],
      + per_fold = [[1.0, 0.9210526315789472, 0.918918918918919, 0.7027027027027027], [0.33333333333333337, 0.6462585034013605, 0.6083530338849489, 0.2751322751322752]],)
      entry.model.K
      5

      Tuning multiple nested hyperparameters

      The forest model below has another model, namely a DecisionTreeRegressor, as a hyperparameter:

      tree = Tree() # defined above
      +forest = EnsembleModel(model=tree)
      DeterministicEnsembleModel(
      +  model = DecisionTreeRegressor(
      +        max_depth = -1, 
      +        min_samples_leaf = 5, 
      +        min_samples_split = 2, 
      +        min_purity_increase = 0.0, 
      +        n_subfeatures = 0, 
      +        post_prune = false, 
      +        merge_purity_threshold = 1.0, 
      +        feature_importance = :impurity, 
      +        rng = Random._GLOBAL_RNG()), 
      +  atomic_weights = Float64[], 
      +  bagging_fraction = 0.8, 
      +  rng = Random._GLOBAL_RNG(), 
      +  n = 100, 
      +  acceleration = CPU1{Nothing}(nothing), 
      +  out_of_bag_measure = Any[])

      Ranges for nested hyperparameters are specified using dot syntax. In this case, we will specify a goal for the total number of grid points:

      r1 = range(forest, :(model.n_subfeatures), lower=1, upper=9);
      +r2 = range(forest, :bagging_fraction, lower=0.4, upper=1.0);
      +self_tuning_forest = TunedModel(
      +    model=forest,
      +    tuning=Grid(goal=30),
      +    resampling=CV(nfolds=6),
      +    range=[r1, r2],
      +    measure=rms);
      +
      +X = MLJ.table(rand(100, 10));
      +y = 2X.x1 - X.x2 + 0.05*rand(100);
      +
      +mach = machine(self_tuning_forest, X, y);
      +fit!(mach, verbosity=0);
      trained Machine; does not cache data
      +  model: DeterministicTunedModel(model = DeterministicEnsembleModel(model = DecisionTreeRegressor(max_depth = -1, …), …), …)
      +  args: 
      +    1:	Source @409 ⏎ Table{AbstractVector{Continuous}}
      +    2:	Source @566 ⏎ AbstractVector{Continuous}
      +

      We can plot the grid search results:

      using Plots
      +plot(mach)

      Instead of specifying a goal, we can declare a global resolution, which is overridden for a particular parameter by pairing its range with the resolution desired. In the next example, the default resolution=100 is applied to the r2 field, but a resolution of 3 is applied to the r1 field. Additionally, we ask that the grid points be randomly traversed and the total number of evaluations be limited to 25.

      tuning = Grid(resolution=100, shuffle=true, rng=1234)
      +self_tuning_forest = TunedModel(
      +    model=forest,
      +    tuning=tuning,
      +    resampling=CV(nfolds=6),
      +    range=[(r1, 3), r2],
      +    measure=rms,
      +    n=25
      +);
      +fit!(machine(self_tuning_forest, X, y), verbosity=0);
      trained Machine; does not cache data
      +  model: DeterministicTunedModel(model = DeterministicEnsembleModel(model = DecisionTreeRegressor(max_depth = -1, …), …), …)
      +  args: 
      +    1:	Source @277 ⏎ Table{AbstractVector{Continuous}}
      +    2:	Source @657 ⏎ AbstractVector{Continuous}
      +

      For more options for a grid search, see Grid below.

      Let's attempt to tune the same hyperparameters using a RandomSearch tuning strategy. By default, bounded numeric ranges like r1 and r2 are sampled uniformly (before rounding, in the case of the integer range r1). Positive unbounded ranges are sampled using a Gamma distribution by default, and all others using a (truncated) normal distribution.

      self_tuning_forest = TunedModel(
      +    model=forest,
      +    tuning=RandomSearch(),
      +    resampling=CV(nfolds=6),
      +    range=[r1, r2],
      +    measure=rms,
      +    n=25
      +);
      +X = MLJ.table(rand(100, 10));
      +y = 2X.x1 - X.x2 + 0.05*rand(100);
      +mach = machine(self_tuning_forest, X, y);
      +fit!(mach, verbosity=0)
      trained Machine; does not cache data
      +  model: DeterministicTunedModel(model = DeterministicEnsembleModel(model = DecisionTreeRegressor(max_depth = -1, …), …), …)
      +  args: 
      +    1:	Source @956 ⏎ Table{AbstractVector{Continuous}}
      +    2:	Source @549 ⏎ AbstractVector{Continuous}
      +
      using Plots
      +plot(mach)

      The prior distributions used for sampling each hyperparameter can be customized, as can the global fallbacks. See the RandomSearch doc-string below for details.
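
For example, one might pair r2 with an explicit prior, leaving r1 to the default fallback. This is a sketch only; the Normal(0.7, 0.1) prior is an arbitrary choice, and sampling from it is truncated to the bounds of r2:

import Distributions
self_tuning_forest = TunedModel(
    model=forest,
    tuning=RandomSearch(),
    resampling=CV(nfolds=6),
    range=[r1, (r2, Distributions.Normal(0.7, 0.1))],
    measure=rms,
    n=25
);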

      Tuning using Latin hypercube sampling

One can also tune the hyperparameters using the LatinHypercube tuning strategy. This method uses a genetic optimization algorithm based on the inverse of the Audze-Eglais function, implemented in the library LatinHypercubeSampling.jl.

      We'll work with the data X, y and ranges r1 and r2 defined above and instantiate a Latin hypercube resampling strategy:

      latin = LatinHypercube(gens=2, popsize=120)
      LatinHypercube(
      +  gens = 2, 
      +  popsize = 120, 
      +  ntour = 2, 
      +  ptour = 0.8, 
      +  interSampleWeight = 1.0, 
      +  ae_power = 2, 
      +  periodic_ae = false, 
      +  rng = Random._GLOBAL_RNG())

Here gens is the number of generations to run the optimisation for, and popsize is the population size in the genetic algorithm. For more on these and other LatinHypercube parameters, refer to the LatinHypercubeSampling.jl documentation. Note that gens and popsize are not to be confused with the iteration parameter n in the construction of a corresponding TunedModel instance, which specifies the total number of models to be evaluated, independently of the tuning strategy.

      For this illustration we'll add a third, nominal, hyper-parameter:

      r3 = range(forest, :(model.post_prune), values=[true, false]);
      +self_tuning_forest = TunedModel(
      +    model=forest,
      +    tuning=latin,
      +    resampling=CV(nfolds=6),
      +    range=[r1, r2, r3],
      +    measure=rms,
      +    n=25
      +);
      +mach = machine(self_tuning_forest, X, y);
      +fit!(mach, verbosity=0)
      trained Machine; does not cache data
      +  model: DeterministicTunedModel(model = DeterministicEnsembleModel(model = DecisionTreeRegressor(max_depth = -1, …), …), …)
      +  args: 
      +    1:	Source @241 ⏎ Table{AbstractVector{Continuous}}
      +    2:	Source @508 ⏎ AbstractVector{Continuous}
      +
      using Plots
      +plot(mach)

      Comparing models of different type and nested cross-validation

      Instead of mutating hyperparameters of a fixed model, one can instead optimise over an explicit list of models, whose types are allowed to vary. As with other tuning strategies, evaluating the resulting TunedModel itself implies nested resampling (e.g., nested cross-validation) which we now examine in a bit more detail.

      tree = (@load DecisionTreeClassifier pkg=DecisionTree verbosity=0)()
      +knn = (@load KNNClassifier pkg=NearestNeighborModels verbosity=0)()
      +models = [tree, knn]

The following wrapped model is equivalent to the best model in models, as determined using 3-fold cross-validation:

      multi_model = TunedModel(
      +    models=models,
      +    resampling=CV(nfolds=3),
      +    measure=log_loss,
      +    check_measure=false
      +)

      Note that there is no need to specify a tuning strategy or range but we do specify models (plural) instead of model. Evaluating multi_model implies nested cross-validation (each model gets evaluated 2 x 3 times):

      X, y = make_blobs()
      +
      +e = evaluate(multi_model, X, y, resampling=CV(nfolds=2), measure=log_loss, verbosity=6)
      PerformanceEvaluation object with these fields:
      +  model, measure, operation, measurement, per_fold,
      +  per_observation, fitted_params_per_fold,
      +  report_per_fold, train_test_rows, resampling, repeats
      +Extract:
      +┌──────────────────────┬───────────┬─────────────┬─────────┬────────────────────
      +│ measure              │ operation │ measurement │ 1.96*SE │ per_fold          ⋯
      +├──────────────────────┼───────────┼─────────────┼─────────┼────────────────────
      +│ LogLoss(             │ predict   │ 0.36        │ 0.999   │ [0.721, 2.22e-16] ⋯
      +│   tol = 2.22045e-16) │           │             │         │                   ⋯
      +└──────────────────────┴───────────┴─────────────┴─────────┴────────────────────
      +

      Now, for example, we can get the best model for the first fold out of the two folds:

      e.report_per_fold[1].best_model
      KNNClassifier(
      +  K = 5, 
      +  algorithm = :kdtree, 
      +  metric = Distances.Euclidean(0.0), 
      +  leafsize = 10, 
      +  reorder = true, 
      +  weights = NearestNeighborModels.Uniform())

      And the losses in the outer loop (these still have to be matched to the best performing model):

      e.per_fold
      1-element Vector{Vector{Float64}}:
      + [0.7208730677823432, 2.220446049250313e-16]

It is also possible to get the results for the nested evaluations. For example, for the second fold of the outer loop and the first model:

      e.report_per_fold[2].history[1]
      (model = DecisionTreeClassifier(max_depth = -1, …),
      + measure = StatisticalMeasuresBase.RobustMeasure{StatisticalMeasuresBase.FussyMeasure{StatisticalMeasuresBase.RobustMeasure{StatisticalMeasures._LogLossType{Float64}}, typeof(StatisticalMeasures.l2_check)}}[LogLoss(tol = 2.22045e-16)],
      + measurement = [2.1626192033470293],
      + per_fold = [[2.1202149052421855, 2.1202149052421855, 2.252728336819822]],)

      Reference

Base.range - Function
      r = range(model, :hyper; values=nothing)

      Define a one-dimensional NominalRange object for a field hyper of model. Note that r is not directly iterable but iterator(r) is.

      A nested hyperparameter is specified using dot notation. For example, :(atom.max_depth) specifies the max_depth hyperparameter of the submodel model.atom.

      r = range(model, :hyper; upper=nothing, lower=nothing,
      +          scale=nothing, values=nothing)

Assuming values is not specified, define a one-dimensional NumericRange object for a Real field hyper of model. Note that r is not directly iterable, but iterator(r, n) is an iterator of length n. To generate random elements from r, instead apply rand methods to sampler(r). The supported scales are :linear, :log, :logminus, :log10, :log10minus, :log2, or a callable object.

      Note that r is not directly iterable, but iterator(r, n) is, for given resolution (length) n.

      By default, the behaviour of the constructed object depends on the type of the value of the hyperparameter :hyper at model at the time of construction. To override this behaviour (for instance if model is not available) specify a type in place of model so the behaviour is determined by the value of the specified type.

      A nested hyperparameter is specified using dot notation (see above).

      If scale is unspecified, it is set to :linear, :log, :log10minus, or :linear, according to whether the interval (lower, upper) is bounded, right-unbounded, left-unbounded, or doubly unbounded, respectively. Note upper=Inf and lower=-Inf are allowed.
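
For instance, here is a right-unbounded range constructed by passing a type in place of a model (the field name :hyper is hypothetical); by the rule just stated, its scale should default to :log:

r_unbounded = range(Float64, :hyper, lower=2, upper=Inf, origin=3, unit=1)
r_unbounded.scale   # expected to be :log, per the rule above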

      If values is specified, the other keyword arguments are ignored and a NominalRange object is returned (see above).

      See also: iterator, sampler

      source
MLJBase.iterator - Function
      iterator([rng, ], r::NominalRange, [,n])
      +iterator([rng, ], r::NumericRange, n)

      Return an iterator (currently a vector) for a ParamRange object r. In the first case iteration is over all values stored in the range (or just the first n, if n is specified). In the second case, the iteration is over approximately n ordered values, generated as follows:

      (i) First, exactly n values are generated between U and L, with a spacing determined by r.scale (uniform if scale=:linear) where U and L are given by the following table:

r.lower | r.upper | L                 | U
finite  | finite  | r.lower           | r.upper
-Inf    | finite  | r.upper - 2r.unit | r.upper
finite  | Inf     | r.lower           | r.lower + 2r.unit
-Inf    | Inf     | r.origin - r.unit | r.origin + r.unit

      (ii) If a callable f is provided as scale, then a uniform spacing is always applied in (i) but f is broadcast over the results. (Unlike ordinary scales, this alters the effective range of values generated, instead of just altering the spacing.)

(iii) If r is a discrete numeric range (r isa NumericRange{<:Integer}) then the values are additionally rounded, with any duplicate values removed. Otherwise all the values are used (and there are exactly n of them).

      (iv) Finally, if a random number generator rng is specified, then the values are returned in random order (sampling without replacement), and otherwise they are returned in numeric order, or in the order provided to the range constructor, in the case of a NominalRange.

      source
Distributions.sampler - Function
      sampler(r::NominalRange, probs::AbstractVector{<:Real})
      +sampler(r::NominalRange)
      +sampler(r::NumericRange{T}, d)

      Construct an object s which can be used to generate random samples from a ParamRange object r (a one-dimensional range) using one of the following calls:

      rand(s)             # for one sample
      +rand(s, n)          # for n samples
      +rand(rng, s [, n])  # to specify an RNG

      The argument probs can be any probability vector with the same length as r.values. The second sampler method above calls the first with a uniform probs vector.

The argument d can be either an arbitrary instance of UnivariateDistribution from the Distributions.jl package, or one of the Distributions.jl types for which fit(d, ::NumericRange) is defined. These include: Arcsine, Uniform, Biweight, Cosine, Epanechnikov, SymTriangularDist, Triweight, Normal, Gamma, InverseGaussian, Logistic, LogNormal, Cauchy, Gumbel, Laplace, and Poisson; but see the doc-string for Distributions.fit for an up-to-date list.

If d is an instance, then sampling is from a truncated form of the supplied distribution d, the truncation bounds being r.lower and r.upper (the r.origin and r.unit attributes are ignored). For discrete numeric ranges (T <: Integer) the samples are rounded.

      If d is a type then a suitably truncated distribution is automatically generated using Distributions.fit(d, r).

      Important. Values are generated with no regard to r.scale, except in the special case r.scale is a callable object f. In that case, f is applied to all values generated by rand as described above (prior to rounding, in the case of discrete numeric ranges).

      Examples

      r = range(Char, :letter, values=collect("abc"))
      +s = sampler(r, [0.1, 0.2, 0.7])
      +samples =  rand(s, 1000);
      +StatsBase.countmap(samples)
      +Dict{Char,Int64} with 3 entries:
      +  'a' => 107
      +  'b' => 205
      +  'c' => 688
      +
      +r = range(Int, :k, lower=2, upper=6) # numeric but discrete
      +s = sampler(r, Normal)
      +samples = rand(s, 1000);
      +UnicodePlots.histogram(samples)
      +           ┌                                        ┐
      +[2.0, 2.5) ┤▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 119
      +[2.5, 3.0) ┤ 0
      +[3.0, 3.5) ┤▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 296
      +[3.5, 4.0) ┤ 0
      +[4.0, 4.5) ┤▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 275
      +[4.5, 5.0) ┤ 0
      +[5.0, 5.5) ┤▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇▇ 221
      +[5.5, 6.0) ┤ 0
      +[6.0, 6.5) ┤▇▇▇▇▇▇▇▇▇▇▇ 89
      +           └                                        ┘
      source
StatsAPI.fit - Method
      Distributions.fit(D, r::MLJBase.NumericRange)

      Fit and return a distribution d of type D to the one-dimensional range r.

      Only types D in the table below are supported.

The distribution d is constructed in two stages. First, a distribution d0, characterized by the conditions in the second column of the table, is fit to r. Then d0 is truncated between r.lower and r.upper to obtain d.

Distribution type D | Characterization of d0
Arcsine, Uniform, Biweight, Cosine, Epanechnikov, SymTriangularDist, Triweight | minimum(d) = r.lower, maximum(d) = r.upper
Normal, Gamma, InverseGaussian, Logistic, LogNormal | mean(d) = r.origin, std(d) = r.unit
Cauchy, Gumbel, Laplace, (Normal) | Dist.location(d) = r.origin, Dist.scale(d) = r.unit
Poisson | Dist.mean(d) = r.unit

      Here Dist = Distributions.
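
As a small illustration (a sketch only; the field name :lambda is hypothetical), fitting Normal to a bounded range returns a normal distribution with mean r.origin and standard deviation r.unit, truncated to [r.lower, r.upper]:

import Distributions
r_lambda = range(Float64, :lambda, lower=1.0, upper=9.0)   # origin=5.0, unit=4.0
d = Distributions.fit(Distributions.Normal, r_lambda)      # Normal(5.0, 4.0), truncated to [1.0, 9.0]
rand(d, 3)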

      source
MLJTuning.TunedModel - Function
      tuned_model = TunedModel(; model=<model to be mutated>,
      +                         tuning=RandomSearch(),
      +                         resampling=Holdout(),
      +                         range=nothing,
      +                         measure=nothing,
      +                         n=default_n(tuning, range),
      +                         operation=nothing,
      +                         other_options...)

      Construct a model wrapper for hyper-parameter optimization of a supervised learner, specifying the tuning strategy and model whose hyper-parameters are to be mutated.

      tuned_model = TunedModel(; models=<models to be compared>,
      +                         resampling=Holdout(),
      +                         measure=nothing,
      +                         n=length(models),
      +                         operation=nothing,
      +                         other_options...)

Construct a wrapper for multiple models, for selection of an optimal one (equivalent to specifying tuning=Explicit() and range=models above). Elements of the iterator models need not have a common type, but they must all be Deterministic or all be Probabilistic; this is not checked but is inferred from the first element generated.

      See below for a complete list of options.

      Training

      Calling fit!(mach) on a machine mach=machine(tuned_model, X, y) or mach=machine(tuned_model, X, y, w) will:

• Instigate a search, over clones of model, with the hyperparameter mutations specified by range, for a model optimizing the specified measure, using performance evaluations carried out using the specified tuning strategy and resampling strategy. In the case that models is explicitly listed, the search is instead over the models generated by the iterator models.

• Fit an internal machine, based on the optimal model fitted_params(mach).best_model, wrapping the optimal model object in all the provided data X, y (, w). Calling predict(mach, Xnew) then returns predictions on Xnew of this internal machine. The final train can be suppressed by setting train_best=false.

      Search space

The range objects supported depend on the tuning strategy specified. Query the strategy docstring for details. To optimize over an explicit list v of models of the same type, use tuning=Explicit() and specify model=v[1] and range=v.

      The number of models searched is specified by n. If unspecified, then MLJTuning.default_n(tuning, range) is used. When n is increased and fit!(mach) called again, the old search history is re-instated and the search continues where it left off.
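
A minimal sketch of such a warm restart (self-contained, with a fresh regression dataset; the variable names are our own):

using MLJ
Tree = @load DecisionTreeRegressor pkg=DecisionTree verbosity=0
X = MLJ.table(rand(100, 10));
y = 2X.x1 - X.x2 + 0.05*rand(100);
tree = Tree()
r = range(tree, :min_purity_increase, lower=0.001, upper=1.0, scale=:log);
tuned = TunedModel(model=tree, tuning=Grid(resolution=10),
                   resampling=CV(nfolds=3), range=r, measure=rms, n=5);
mach = machine(tuned, X, y);
fit!(mach, verbosity=0);   # evaluates 5 models
tuned.n = 10;
fit!(mach, verbosity=0);   # evaluates only 5 additional models; the earlier history is retained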

      Measures (metrics)

      If more than one measure is specified, then only the first is optimized (unless strategy is multi-objective) but the performance against every measure specified will be computed and reported in report(mach).best_performance and other relevant attributes of the generated report. Options exist to pass per-observation weights or class weights to measures; see below.

Important. If a custom measure, my_measure, is used, and the measure is a score rather than a loss, be sure to check that MLJ.orientation(my_measure) == :score, to ensure maximization of the measure rather than minimization. Override an incorrect value with MLJ.orientation(::typeof(my_measure)) = :score.

      Accessing the fitted parameters and other training (tuning) outcomes

      A Plots.jl plot of performance estimates is returned by plot(mach) or heatmap(mach).

Once a tuning machine mach has been trained as above, then fitted_params(mach) has these keys/values:

key                | value
best_model         | optimal model instance
best_fitted_params | learned parameters of the optimal model

      The named tuple report(mach) includes these keys/values:

key                | value
best_model         | optimal model instance
best_history_entry | corresponding entry in the history, including performance estimate
best_report        | report generated by fitting the optimal model to all data
history            | tuning strategy-specific history of all evaluations

      plus other key/value pairs specific to the tuning strategy.

      Complete list of key-word options

      • model: Supervised model prototype that is cloned and mutated to generate models for evaluation

      • models: Alternatively, an iterator of MLJ models to be explicitly evaluated. These may have varying types.

      • tuning=RandomSearch(): tuning strategy to be applied (eg, Grid()). See the Tuning Models section of the MLJ manual for a complete list of options.

• resampling=Holdout(): resampling strategy (eg, Holdout(), CV(), StratifiedCV()) to be applied in performance evaluations

      • measure: measure or measures to be applied in performance evaluations; only the first used in optimization (unless the strategy is multi-objective) but all reported to the history

• weights: per-observation weights to be passed to the measure(s) in performance evaluations, where supported. Check support with supports_weights(measure).

• class_weights: class weights to be passed to the measure(s) in performance evaluations, where supported. Check support with supports_class_weights(measure).

      • repeats=1: for generating train/test sets multiple times in resampling ("Monte Carlo" resampling); see evaluate! for details

      • operation/operations - One of predict, predict_mean, predict_mode, predict_median, or predict_joint, or a vector of these of the same length as measure/measures. Automatically inferred if left unspecified.

      • range: range object; tuning strategy documentation describes supported types

• selection_heuristic: the rule determining how the best model is decided. According to the default heuristic, NaiveSelection(), measure (or the first element of measure) is evaluated for each resample and these per-fold measurements are aggregated. The model with the lowest (resp. highest) aggregate is chosen if the measure is a :loss (resp. a :score).

      • n: number of iterations (ie, models to be evaluated); set by tuning strategy if left unspecified

      • train_best=true: whether to train the optimal model

      • acceleration=default_resource(): mode of parallelization for tuning strategies that support this

      • acceleration_resampling=CPU1(): mode of parallelization for resampling

• check_measure=true: whether to check that measure is compatible with the specified model and operation

• cache=true: whether to cache model-specific representations of user-supplied data; set to false to conserve memory. Speed gains likely limited to the case resampling isa Holdout.

      source
MLJTuning.Grid - Type
      Grid(goal=nothing, resolution=10, rng=Random.GLOBAL_RNG, shuffle=true)

      Instantiate a Cartesian grid-based hyperparameter tuning strategy with a specified number of grid points as goal, or using a specified default resolution in each numeric dimension.

      Supported ranges:

A single one-dimensional range or vector of one-dimensional ranges can be specified. Specifically, in Grid search, the range field of a TunedModel instance can be:

      • A single one-dimensional range - ie, ParamRange object - r, or pair of the form (r, res) where res specifies a resolution to override the default resolution.

      • Any vector of objects of the above form

      Two elements of a range vector may share the same field attribute, with the effect that their grids are combined, as in Example 3 below.

      ParamRange objects are constructed using the range method.

      Example 1:

      range(model, :hyper1, lower=1, origin=2, unit=1)

      Example 2:

      [(range(model, :hyper1, lower=1, upper=10), 15),
      +  range(model, :hyper2, lower=2, upper=4),
      +  range(model, :hyper3, values=[:ball, :tree])]

      Example 3:

      # a range generating the grid `[1, 2, 10, 20, 30]` for `:hyper1`:
      +[range(model, :hyper1, values=[1, 2]),
      + (range(model, :hyper1, lower= 10, upper=30), 3)]

Note: All the field values of the ParamRange objects (:hyper1, :hyper2, :hyper3 in the preceding example) must refer to field names of a single model (the model specified during TunedModel construction).

      Algorithm

      This is a standard grid search with the following specifics: In all cases all values of each specified NominalRange are exhausted. If goal is specified, then all resolutions are ignored, and a global resolution is applied to the NumericRange objects that maximizes the number of grid points, subject to the restriction that this not exceed goal. (This assumes no field appears twice in the range vector.) Otherwise the default resolution and any parameter-specific resolutions apply.

      In all cases the models generated are shuffled using rng, unless shuffle=false.

      See also TunedModel, range.

      source
MLJTuning.RandomSearch - Type
      RandomSearch(bounded=Distributions.Uniform,
      +             positive_unbounded=Distributions.Gamma,
      +             other=Distributions.Normal,
      +             rng=Random.GLOBAL_RNG)

      Instantiate a random search tuning strategy, for searching over Cartesian hyperparameter domains, with customizable priors in each dimension.

      Supported ranges

A single one-dimensional range or vector of one-dimensional ranges can be specified. If not paired with a prior, then one is fitted, according to fallback distribution types specified by the tuning strategy hyperparameters. Specifically, in RandomSearch, the range field of a TunedModel instance can be:

      • a single one-dimensional range (ParamRange object) r

      • a pair of the form (r, d), with r as above and where d is:

        • a probability vector of the same length as r.values (r a NominalRange)

        • any Distributions.UnivariateDistribution instance (r a NumericRange)

        • one of the subtypes of Distributions.UnivariateDistribution listed in the table below, for automatic fitting using Distributions.fit(d, r), a distribution whose support always lies between r.lower and r.upper (r a NumericRange)

      • any pair of the form (field, s), where field is the (possibly nested) name of a field of the model to be tuned, and s an arbitrary sampler object for that field. This means only that rand(rng, s) is defined and returns valid values for the field.

      • any vector of objects of the above form

      A range vector may contain multiple entries for the same model field, as in range = [(:lambda, s1), (:alpha, s), (:lambda, s2)]. In that case the entry used in each iteration is random.

distribution types | for fitting to ranges of this type
Arcsine, Uniform, Biweight, Cosine, Epanechnikov, SymTriangularDist, Triweight | bounded
Gamma, InverseGaussian, Poisson | positive (bounded or unbounded)
Normal, Logistic, LogNormal, Cauchy, Gumbel, Laplace | any

      ParamRange objects are constructed using the range method.

      Examples

      using Distributions
      +
      +range1 = range(model, :hyper1, lower=0, upper=1)
      +
      +range2 = [(range(model, :hyper1, lower=1, upper=10), Arcsine),
      +          range(model, :hyper2, lower=2, upper=Inf, unit=1, origin=3),
      +          (range(model, :hyper2, lower=2, upper=4), Normal(0, 3)),
      +          (range(model, :hyper3, values=[:ball, :tree]), [0.3, 0.7])]
      +
      +# uniform sampling of :(atom.λ) from [0, 1] without defining a NumericRange:
      +struct MySampler end
      +Base.rand(rng::Random.AbstractRNG, ::MySampler) = rand(rng)
      +range3 = (:(atom.λ), MySampler())

      Algorithm

      In each iteration, a model is generated for evaluation by mutating the fields of a deep copy of model. The range vector is shuffled and the fields sampled according to the new order (repeated fields being mutated more than once). For a range entry of the form (field, s) the algorithm calls rand(rng, s) and mutates the field field of the model clone to have this value. For an entry of the form (r, d), s is substituted with sampler(r, d). If no d is specified, then sampling is uniform (with replacement) if r is a NominalRange, and is otherwise given by the defaults specified by the tuning strategy parameters bounded, positive_unbounded, and other, depending on the field values of the NumericRange object r.

      See also TunedModel, range, sampler.

      source
MLJTuning.LatinHypercube - Type
      LatinHypercube(gens = 1,
      +               popsize = 100,
      +               ntour = 2,
+               ptour = 0.8,
      +               interSampleWeight = 1.0,
      +               ae_power = 2,
      +               periodic_ae = false,
      +               rng=Random.GLOBAL_RNG)

Instantiate a grid-based hyperparameter tuning strategy using the library LatinHypercubeSampling.jl.

An optimised Latin hypercube sampling plan is created using a genetic optimization algorithm based on the inverse of the Audze-Eglais function. The optimization is run for gens generations and creates n models for evaluation, where n is specified by a corresponding TunedModel instance, as in

      tuned_model = TunedModel(model=...,
      +                         tuning=LatinHypercube(...),
      +                         range=...,
      +                         measures=...,
      +                         n=...)

      (See TunedModel for complete options.)

      To use a periodic version of the Audze-Eglais function (to reduce clustering along the boundaries) specify periodic_ae = true.

      Supported ranges:

A single one-dimensional range or vector of one-dimensional ranges can be specified. Specifically, in LatinHypercubeSampling search, the range field of a TunedModel instance can be:

• A single one-dimensional range - ie, ParamRange object - r, constructed using the range method.

      • Any vector of objects of the above form

      Both NumericRanges and NominalRanges are supported, and hyper-parameter values are sampled on a scale specified by the range (eg, r.scale = :log).

      source
      diff --git a/v0.20.3/weights/index.html b/v0.20.3/weights/index.html new file mode 100644 index 000000000..68b88eb01 --- /dev/null +++ b/v0.20.3/weights/index.html @@ -0,0 +1,12 @@ + +Weights · MLJ

      Weights

      In machine learning it is possible to assign each observation an independent significance, or weight, either in training or in performance evaluation, or both.

      There are two kinds of weights in use in MLJ:

      • per observation weights (also just called weights) refer to weight vectors of the same length as the number of observations

      • class weights refer to dictionaries keyed on the target classes (levels) for use in classification problems

      Specifying weights in training

      To specify weights in training you bind the weights to the model along with the data when constructing a machine. For supervised models the weights are specified last:

      KNNRegressor = @load KNNRegressor
      +model = KNNRegressor()
      +X, y = make_regression(10, 3)
      +w = rand(length(y))
      +
      +mach = machine(model, X, y, w) |> fit!

Note that a model supports per-observation weights if supports_weights(model) is true. To list all such models, do

      models() do m
      +    m.supports_weights
      +end

A model supports class weights if supports_class_weights(model) is true.
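
Analogously, the models supporting class weights can be listed by querying the corresponding trait (mirroring the query above):

models() do m
    m.supports_class_weights
end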

      Specifying weights in performance evaluation

      When calling a measure (metric) that supports weights, provide the weights as the last argument, as in

      _, y = @load_iris
      +ŷ = shuffle(y)
      +w = Dict("versicolor" => 1, "setosa" => 2, "virginica"=> 3)
      +macro_f1score(ŷ, y, w)

      Some measures also support specification of a class weight dictionary. For details see the StatisticalMeasures.jl tutorial.

      To pass weights to all the measures listed in an evaluate!/evaluate call, use the keyword specifiers weights=... or class_weights=.... For details, see Evaluating Model Performance.
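
A minimal sketch of passing class weights to every measure in an evaluate call (this assumes the DecisionTree package is installed; the weight values are arbitrary):

using MLJ
DecisionTreeClassifier = @load DecisionTreeClassifier pkg=DecisionTree verbosity=0
model = DecisionTreeClassifier()
X, y = @load_iris
class_weights = Dict("setosa" => 1.0, "versicolor" => 2.0, "virginica" => 3.0)
evaluate(model, X, y,
         resampling=CV(nfolds=3),
         measure=macro_f1score,
         class_weights=class_weights)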

      diff --git a/v0.20.3/working_with_categorical_data/index.html b/v0.20.3/working_with_categorical_data/index.html new file mode 100644 index 000000000..9a6002e3f --- /dev/null +++ b/v0.20.3/working_with_categorical_data/index.html @@ -0,0 +1,110 @@ + +Working with Categorical Data · MLJ

      Working with Categorical Data

      Scientific types for discrete data

      Recall that models articulate their data requirements using scientific types (see Getting Started or the ScientificTypes.jl documentation). There are three scientific types discrete data can have: Count, OrderedFactor and Multiclass.

      Count data

      In MLJ you cannot use integers to represent (finite) categorical data. Integers are reserved for discrete data you want interpreted as Count <: Infinite:

      scitype([1, 4, 5, 6])
      AbstractVector{Count} (alias for AbstractArray{Count, 1})

      The Count scientific type includes things like the number of phone calls, or city populations, and other "frequency" data of a generally unbounded nature.

      That said, you may have data that is theoretically Count, but which you coerce to OrderedFactor to enable the use of more models, trusting to your knowledge of how those models work to inform an appropriate interpretation.

      OrderedFactor and Multiclass data

      Other integer data, such as the number of an animal's legs, or number of rooms in homes, are, generally, coerced to OrderedFactor <: Finite. The other categorical scientific type is Multiclass <: Finite, which is for unordered categorical data. Coercing data to one of these two forms is discussed under Detecting and coercing improperly represented categorical data below.

      Binary data

There is no separate scientific type for binary data. Binary data is OrderedFactor{2} if ordered, and Multiclass{2} otherwise. Data with type OrderedFactor{2} is considered to have an intrinsic "positive" class, e.g., the outcome of a medical test, or the "pass/fail" outcome of an exam. MLJ measures, such as true_positive, assume the second class in the ordering is the "positive" class. Inspecting and changing order are discussed in the next section.

      If data has type Bool it is considered Count data (as Bool <: Integer) and, generally, users will want to coerce such data to Multiclass or OrderedFactor.
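
For example (a minimal illustration):

coerce([true, false, true], OrderedFactor) |> scitype   # AbstractVector{OrderedFactor{2}}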

      Detecting and coercing improperly represented categorical data

      One inspects the scientific type of data using scitype as shown above. To inspect all column scientific types in a table simultaneously, use schema. (The scitype(X) of a table X contains a condensed form of this information used in type dispatch; see here.)

      import DataFrames.DataFrame
      +X = DataFrame(
      +                 name       = ["Siri", "Robo", "Alexa", "Cortana"],
      +                 gender     = ["male", "male", "Female", "female"],
      +                 likes_soup = [true, false, false, true],
      +                 height     = [152, missing, 148, 163],
      +                 rating     = [2, 5, 2, 1],
      +                 outcome    = ["rejected", "accepted", "accepted", "rejected"])
      +schema(X)
      ┌────────────┬───────────────────────┬───────────────────────┐
      +│ names      │ scitypes              │ types                 │
      +├────────────┼───────────────────────┼───────────────────────┤
      +│ name       │ Textual               │ String                │
      +│ gender     │ Textual               │ String                │
      +│ likes_soup │ Count                 │ Bool                  │
      +│ height     │ Union{Missing, Count} │ Union{Missing, Int64} │
      +│ rating     │ Count                 │ Int64                 │
      +│ outcome    │ Textual               │ String                │
      +└────────────┴───────────────────────┴───────────────────────┘
      +

      Coercing a single column:

      X.outcome = coerce(X.outcome, OrderedFactor)
      4-element CategoricalArray{String,1,UInt32}:
      + "rejected"
      + "accepted"
      + "accepted"
      + "rejected"

      The machine type of the result is a CategoricalArray. For more on this type see Under the hood: CategoricalValue and CategoricalArray below.

      Inspecting the order of the levels:

      levels(X.outcome)
      2-element Vector{String}:
      + "accepted"
      + "rejected"

      Since we wish to regard "accepted" as the positive class, it should appear second, which we correct with the levels! function:

      levels!(X.outcome, ["rejected", "accepted"])
      +levels(X.outcome)
      2-element Vector{String}:
      + "rejected"
      + "accepted"
      Changing levels of categorical data

The order of levels should generally be changed early in your data science workflow and then not again. Similar remarks apply to adding levels (which is possible; see the CategoricalArrays.jl documentation). MLJ supervised and unsupervised models assume levels and their order do not change.
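
For instance, an extra (as yet unobserved) level can be appended by listing it together with the existing levels. A minimal sketch, on a fresh vector so that X above is left untouched:

outcome = coerce(["rejected", "accepted", "accepted", "rejected"], OrderedFactor);
levels!(outcome, ["rejected", "accepted", "undecided"]);
levels(outcome)   # three levels, although "undecided" never appears in the data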

      Coercing all remaining types simultaneously:

      Xnew = coerce(X, :gender     => Multiclass,
      +                 :likes_soup => OrderedFactor,
      +                 :height     => Continuous,
      +                 :rating     => OrderedFactor)
      +schema(Xnew)
      ┌────────────┬────────────────────────────┬──────────────────────────────────┐
      +│ names      │ scitypes                   │ types                            │
      +├────────────┼────────────────────────────┼──────────────────────────────────┤
      +│ name       │ Textual                    │ String                           │
      +│ gender     │ Multiclass{3}              │ CategoricalValue{String, UInt32} │
      +│ likes_soup │ OrderedFactor{2}           │ CategoricalValue{Bool, UInt32}   │
      +│ height     │ Union{Missing, Continuous} │ Union{Missing, Float64}          │
      +│ rating     │ OrderedFactor{3}           │ CategoricalValue{Int64, UInt32}  │
      +│ outcome    │ OrderedFactor{2}           │ CategoricalValue{String, UInt32} │
      +└────────────┴────────────────────────────┴──────────────────────────────────┘
      +

      For DataFrames there is also in-place coercion, using coerce!.
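
A minimal sketch of the in-place variant, applied to a copy so that X above is left unchanged:

Xc = copy(X);
coerce!(Xc, :gender => Multiclass);
schema(Xc)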

      Tracking all levels

      The key property of vectors of scientific type OrderedFactor and Multiclass is that the pool of all levels is not lost when separating out one or more elements:

      v = Xnew.rating
      4-element CategoricalArray{Int64,1,UInt32}:
      + 2
      + 5
      + 2
      + 1
      levels(v)
      3-element Vector{Int64}:
      + 1
      + 2
      + 5
      levels(v[1:2])
      3-element Vector{Int64}:
      + 1
      + 2
      + 5
      levels(v[2])
      3-element Vector{Int64}:
      + 1
      + 2
      + 5

      By tracking all classes in this way, MLJ avoids common pain points around categorical data, such as evaluating models on an evaluation set, only to crash your code because classes appear there which were not seen during training.

      By drawing test, validation and training data from a common data structure (as described in Getting Started, for example) one ensures that all possible classes of categorical variables are tracked at all times. However, this does not mitigate problems with new production data, if categorical features there are missing classes or contain previously unseen classes.

      New or missing levels in production data

      Warning

Unpredictable behavior may result whenever Finite categorical data appears in a production set with classes (levels) different from those encountered during training.

      Consider, for example, the following naive workflow:

      # train a one-hot encoder on some data:
      +x = coerce(["black", "white", "white", "black"], Multiclass)
      +X = DataFrame(x=x)
      +
      +model = OneHotEncoder()
      +mach = machine(model, X) |> fit!
      +
      +# one-hot encode new data with missing classes:
      +xproduction = coerce(["white", "white"], Multiclass)
      +Xproduction = DataFrame(x=xproduction)
      +Xproduction == X[2:3,:]
      true

      So far, so good. But the following operation throws an error:

      julia> transform(mach, Xproduction) == transform(mach, X[2:3,:])
      +ERROR: Found category level mismatch in feature `x`. Consider using `levels!` to ensure fitted and transforming features have the same category levels.

      The problem here is that levels(X.x) and levels(Xproduction.x) are different:

      levels(X.x)
      2-element Vector{String}:
      + "black"
      + "white"
      levels(Xproduction.x)
      1-element Vector{String}:
      + "white"

      This could be anticipated by the fact that the training and production data have different schema:

      schema(X)
      ┌───────┬───────────────┬──────────────────────────────────┐
      +│ names │ scitypes      │ types                            │
      +├───────┼───────────────┼──────────────────────────────────┤
      +│ x     │ Multiclass{2} │ CategoricalValue{String, UInt32} │
      +└───────┴───────────────┴──────────────────────────────────┘
      +
      schema(Xproduction)
      ┌───────┬───────────────┬──────────────────────────────────┐
      +│ names │ scitypes      │ types                            │
      +├───────┼───────────────┼──────────────────────────────────┤
      +│ x     │ Multiclass{1} │ CategoricalValue{String, UInt32} │
      +└───────┴───────────────┴──────────────────────────────────┘
      +

      One fix is to manually correct the levels of the production data:

      levels!(Xproduction.x, levels(x))
      +transform(mach, Xproduction) == transform(mach, X[2:3,:])
      true

      Another solution is to pack all production data with dummy rows based on the training data (subsequently dropped) to ensure there are no missing classes. Currently, MLJ contains no general tooling to check and fix categorical levels in production data (although one can check that training data and production data have the same schema, to ensure the number of classes in categorical data is consistent).
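
One simple defensive pattern is to compare schemas before transforming. This is a sketch only; it catches a differing number of classes but not relabelled ones:

if schema(Xproduction).scitypes != schema(X).scitypes
    @warn "Production data schema differs from training data schema."
end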

      Extracting an integer representation of Finite data

      Occasionally, you may really want an integer representation of data that currently has scitype Finite. For example, you are a developer wrapping an algorithm from an external package for use in MLJ, and that algorithm uses integer representations. Use the int method for this purpose, and use decoder to construct decoders for reversing the transformation:

      v = coerce(["one", "two", "three", "one"], OrderedFactor);
      +levels!(v, ["one", "two", "three"]);
      +v_int = int(v)
      4-element Vector{UInt32}:
      + 0x00000001
      + 0x00000002
      + 0x00000003
      + 0x00000001
      d = decoder(v); # or decoder(v[1])
      +d.(v_int)
      4-element CategoricalArray{String,1,UInt32}:
      + "one"
      + "two"
      + "three"
      + "one"

      Under the hood: CategoricalValue and CategoricalArray

      In MLJ the objects with OrderedFactor or Multiclass scientific type have machine type CategoricalValue, from the CategoricalArrays.jl package. In some sense CategoricalValues are an implementation detail users can ignore for the most part, as shown above. However, you may want some basic understanding of these types, and those implementing MLJ's model interface for new algorithms will have to understand them. For the complete API, see the CategoricalArrays.jl documentation. Here are the basics:

      To construct an OrderedFactor or Multiclass vector directly from raw labels, one uses categorical:

v = categorical(['A', 'B', 'A', 'A', 'C'])
typeof(v)
CategoricalVector{Char, UInt32, Char, CategoricalValue{Char, UInt32}, Union{}} (alias for CategoricalArray{Char, 1, UInt32, Char, CategoricalValue{Char, UInt32}, Union{}})

(Equivalent to the more idiomatic MLJ construction v = coerce(['A', 'B', 'A', 'A', 'C'], Multiclass).)

scitype(v)
AbstractVector{Multiclass{3}} (alias for AbstractArray{Multiclass{3}, 1})

v = categorical(['A', 'B', 'A', 'A', 'C'], ordered=true, compress=true)
5-element CategoricalArray{Char,1,UInt8}:
 'A'
 'B'
 'A'
 'A'
 'C'

scitype(v)
AbstractVector{OrderedFactor{3}} (alias for AbstractArray{OrderedFactor{3}, 1})

      When you index a CategoricalVector you don't get a raw label, but instead an instance of CategoricalValue. As explained above, this value knows the complete pool of levels from the vector from which it came. Use get(val) to extract the raw label from a value val.
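For instance, continuing with the vector v constructed above (a small sketch):

val = v[1]     # a CategoricalValue, not a raw Char
get(val)       # 'A': the raw label
levels(val)    # ['A', 'B', 'C']: the value carries the complete pool of levels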

Despite the distinction between a value (element) and a raw label, the two compare as equal from the point of view of == and in:

v[1] == 'A' # true
'A' in v    # true

      Probabilistic predictions of categorical data

Recall from Getting Started that probabilistic classifiers ordinarily predict UnivariateFinite distributions, rather than raw probabilities (which are instead accessed using the pdf method). Here's how to construct such a distribution yourself:

      v = coerce(["yes", "no", "yes", "yes", "maybe"], Multiclass)
      +d = UnivariateFinite([v[2], v[1]], [0.9, 0.1])
      UnivariateFinite{Multiclass{3}}(no=>0.9, yes=>0.1)

      Or, equivalently,

      d = UnivariateFinite(["no", "yes"], [0.9, 0.1], pool=v)
      UnivariateFinite{Multiclass{3}}(no=>0.9, yes=>0.1)

      This distribution tracks all levels, not just the ones to which you have assigned probabilities:

      pdf(d, "maybe")
      0.0

      However, pdf(d, "dunno") will throw an error.

      You can declare pool=missing, but then "maybe" will not be tracked:

      d = UnivariateFinite(["no", "yes"], [0.9, 0.1], pool=missing)
      +levels(d)
      2-element Vector{String}:
      + "no"
      + "yes"

      To construct a whole vector of UnivariateFinite distributions, simply give the constructor a matrix of probabilities:

yes_probs = rand(5)
probs = hcat(1 .- yes_probs, yes_probs)
d_vec = UnivariateFinite(["no", "yes"], probs, pool=v)
5-element UnivariateFiniteVector{Multiclass{3}, String, UInt32, Float64}:
 UnivariateFinite{Multiclass{3}}(no=>0.52, yes=>0.48)
 UnivariateFinite{Multiclass{3}}(no=>0.274, yes=>0.726)
 UnivariateFinite{Multiclass{3}}(no=>0.891, yes=>0.109)
 UnivariateFinite{Multiclass{3}}(no=>0.756, yes=>0.244)
 UnivariateFinite{Multiclass{3}}(no=>0.0396, yes=>0.96)

      Or, equivalently:

      d_vec = UnivariateFinite(["no", "yes"], yes_probs, augment=true, pool=v)

      For more options, see UnivariateFinite.

diff --git a/versions.js b/versions.js
index c771d7d34..7cacea5d9 100644
--- a/versions.js
+++ b/versions.js
@@ -21,5 +21,5 @@ var DOC_VERSIONS = [
   "v0.1",
   "dev",
 ];
-var DOCUMENTER_NEWEST = "v0.20.2";
+var DOCUMENTER_NEWEST = "v0.20.3";
 var DOCUMENTER_STABLE = "stable";

zz$*D`Atq(ESLGiS*a{bwcZ$oG?0<6?ZC)FOQUl6O{{|N%^d_FT_I;cYQOU*MSM?r} z8};)ncY1|#*1!KED%2sOAli06P=@&bQO z5+}V@lK7uL{|`s(@1%Y5+(E-B{JFcxy`x{>Tm1p_)5dZ%RdwnxRt8jYy`diIr z+LTTt?yON&ez`9*$iVAl{Q|s-)c0Veh5t?}2pXA?TjHKAKKh7zdxcKybJlXMSX|bh zTEc!z^hYc*>zj3_r&EjictID9A(z>>MfV%$rNu^C%RAsc8EdcW@-$H;?B7c#1$j;6 zipPL?8Qg+YA$aU|Wy%Hu%Uz@#aqknNqKJw8PrOf`WkZwoKkZ|Ap^Gb}K-zGYJ{ z{}b1HwgeK>yxJrkUS5&6A6cq7(NjxaN|^=DYT)>4@0tLa2K;v&%S?e}U|87LehNhp zNdEl!)9!SQ&t#?q=QW)Wwimu2V72mWyW9Q%lwB~9@K_^FX3J=m!dfUUgcaR_9mgU7 z&+5tgRj~MXb$d*pI^Fw?Yz$U8?KET*2*r&7oQy)58k?B3bUyg$URGIIx$L1hF>~Y* z?Te*T%}5;e_rJj(5-wu~py(Vm2=vrauXr9*ux63l)qD8v;o*hcl8Fi7HmJ|;jIgW68`3gW@c zn$;qS18Q)$KlQlvXvD{S0BBSOi+($I1|r%|!>Q7X47z>qq}#N+{YAOC$T;RTkE@RD zmiw$@CofFd!B>eE%@JFUs-xAGtK8BLmHXnU1ta zR#sMz&PSYE@dFC^ywLnm0^1iz`Wix|5&$0SS!ceuO<8Pj1ioP!XMAk3=Ea#Rbhd5* z6wrJf)>ITqRWL=<9CR1eHEzxjjpwUKVPIh=0lf2LdKD}7po#OcqZs?D%WwCmG42!Q zrlB>BSDL`p$!faJ-HI1QcvZKiO8NEo$*b>+8zTWnEpX=tt-Zd{&wvl~Xe}R?O8s^x zn23=l7fcT-W^bF`w&sp7;IS z_v^09`N!a6BC$SPnMDe$*RCo9~p&-EspL%l$#e^ktLc7 zU)a~50Fj01hibp|t;oMM-?6f+e**5dfK*Xvo<*0?lZ=m#Uk5NSb7Ny3nmeH@MB+2d zwL2MSg(Vx(T2GO#SNJ~jC1(I;lQ!gzP3r4K9k zy!zlo6W3z*wt1mMC}DlRKN`4;rM<&VCQoEM)GMv|nToC8v8$u7~r$){6|aI}KAB*jZg zP!Bp$#A0ifdIj6ps|iH1%Ooy#WpTWA_>>Mtj4zFdZDE-y3w5Fc3$xe&+mwb&FTIVDsmi#h?n2-N&<2_{PMzBfk&61$^>6|Vpv$~Qpz z%dH98G}gXTwRq0rq7Nc*7bJVAC3?2a#;920&fo)l{Ajf5$mna5X=sJ(S*q_Z+}kq*`D)*!3l$)5+Iw$FC6! zB3x%T|9O-T&^#%Dn@gue#ATDPeDZ+sE}yFEkXv^fKw*cJ(}yvqK_XMaddxe<@AM?*P)>TVsDH7i2EJH zOxkLorb^5)$hWqSv*`;`XFu(&MpqznVI(n@xNkFfYfOL)5a-E`!(vY`m(6uoIE_96 z$KCXx4!APbv!uLs6yVfs4hCKmgL;nOTLC8DM0dh=tFPvpozac8=X4d{*8*RhMjJTz z9LU@O8wmHNdJFC+vD#5GOcqJj1>!>w+A-39@lO%`s z1t*B?ko@CGl$>e0>I;*1!Mi_k{zI#|Zzs|>dQ6lb{8TwG*5BO#FOiE=49|QMZ1EN^ z!wW!_@X@E(ZA|T4GLjq>ItGMZ%S9(m;^a{2tHr1`73;Y39;S}Ry#d$T#x2b!!s~5# zvu_&r1(Bc6TWlxHcMIRG|cQ*7x5N|g<7wCB^^tJX!m4?r74 zInWJ`Z$Bfrs{HuV_Pv*l$kP;@+@17uV}bHq5_@_x3#V!E6(4HfYn+%cToII>LLr5@c~NHl2IQ^U=+m zeJ5+N{=tav%(Fz3p!mygx`2fy{VpI5kdkq7e(9=xk-K7>F;yW`zW?}YMC=Za!0Z&^ zGdUiAn<&><_6++TP%s4WYL?>U7zu|WNr&^@w0G{^**G*KPF3P&PjX|J!%v?Ky$a>> zn-0K5M5voF{Ncd&KTVg9mi>fnIBpCa#n*c;_X6}aLDc?U{PB7Y5*4Y~4 z_?9ox^2>Q&pF;SenG`Q$& zqMC{LTw}7BQ&OlbZ#hsix5M9=ox0Dw(9&4S3onv{x(a+l(sev-$gP%+ zk@P&XwuU2N=O%7&aG~tP?R4#yL1@YFSh2OcvfXUO4b3`-DCM6U=!*|(ZkV=ytKD5% z8(wRH1zcbZolpETi*%Wk+W>a`Rv+eNJ!;kH+p^#Fy3oSjf={nqe&HjbUv}r8+9L%= zk`3X-R~XKbK|;x@csDuV$~YY6e5wlvPW*R@GR8u*QaF@;Gk>2&D!j|4!dreq%$0N8^ z;`x6;vS?!f@Fd^->?(avtCN#Q1A5yY#Z+$4N4Rh=-9?2>MJS@7B~P$Oif33)(ed7q zi+{mbN^rfTNe)i2rR$WT(4k|7N8xD$wD)%{brd*o6%KtCqy?c-1BJIB2r0y{!74WSlUn zNEx#1_OEQ4_xo}_J4^@)CkZTe?L18C59~%RiieDYu&p#t4{XyWP7P(x+#){QFhP7x zkuoOMki&5m>AC&a=06>|V` zlF`63!Pa&x(Kb$cPkIYVTCniS{PNir#{OsBFq*7?FGQqu6ln3zHs<;xBPSG|?dN^l z@>5(}!q17ucxtcjxZ4EW2#{iP$@%VwW!2Z9lC#PFfAi>IdY>MbXyn>&p^ZK?c~TP^_6Nh zc*pAj=|=I}G~34m(bYgdU5Q>r{UzE~5|(HXnQZ3>Ifl zc3ZCkQ$D$~Y6al_>X#X4KqvLkE_)3r=b$NO*j^)Q_?)6WL5R)y>2C!H*Ce~=>;~rS z-{*2X+BjVPwo~U1fF?Qtm2~ZwwOV-Na3Pb41tJvF&S>F(!@H~)ySz_X1Hq96_3q4VRvKL z1`}{9A9(iJxFp6e#Z2->X;au}Abv@oQYd)(<-0Np$v;osc7*5FS!_XJVaUgI^Doz; z%A+OqMKrJE;HbrGLnsPV&z`A^Ta;4vyMpaC%v?Cfd?vTjpFqs1~2TvH!hO-V_ z$O*;v5H$vgQnLH2%dCPUeikJGu~ten5YPOgr^4l-_s58Lcs61Ii90Jc{Wy<%3JF{? 
zCo@So4&+$wC58s31!5?)gD8T??R%04%YP7mTzG+vV(f`Dua#iSm`r~u7__vn zt5!r2;?7lI*m=8PahCG^b!@pj)>0@z$08PI)Vbo{D7!r&KCk?rhvtc((ykA27ICda zY~=b(Ht9L~ym)E)TzZ45QI|mX97C*7^{{pw+HZVstFihF>TDV;SCDng_XA}=k(%ug3PZ{vq?Gm> zYd?(L;233wM5!C{!ueW|6jIQSXy1wS=69d>Blegga~w+sfV{Z>mG#l3lR_f)(LS1z z$zm>oo5F*Uo@@=UA~`MzbBv*up>WJ(I@`CVy%TZHQX6B)0cU4)76=81XMPQBA&g&RXQw9c z-DUUW5xV`MEo)|NgSSWw;{FN}gU9O6g1Hs%M&2 zgn!@KZGAjB>KdGvWd`ryJ(5pCxccyg!7$&8!kb1eDx`f*>1e)-iIDz&s&;UBN%8$g zTJ37KPHquaSErV=CRu8>yej`#^;9f?a@531bw=iHpIk0Mvd~hS<5ys`My_~H;zOqw zVO284^s)q^x%F(~AKs!a=IM|3Pt@l}+=X4ycbL!A6;lSdXE!M8`6H!fBt z$W2|Nmu^B#{ogMMZ4QhMS^H9<^g*917t`xSE=$*7} z3c`G=UG=C;?sagO_TiiDKX;$?_+IWep8)Z9Hyl3Mb_BOyOWoX7{Z#cjuw!_tf3-Gb z?Mi-CHTeGdnbCkdyISci)v3*rtD76P)YA&v;X^XR8sHK3!9~j>_m(&*y^`t$z4Nvy z?@iVYpJAh#vS!InyE+l7T4Nn(*H%?p^U?#oSJ^x4=dOAF!|t=HYM@6pveOP==EtT+ TdQIDv*}%t0-;7kE=Scq#B_)Cg literal 0 HcmV?d00001 diff --git a/v0.20.3/img/workflows_learning_curve.png b/v0.20.3/img/workflows_learning_curve.png new file mode 100644 index 0000000000000000000000000000000000000000..6c17d905fa7747376178af71597fad0569920a0c GIT binary patch literal 18869 zcmdtKcUV-{`Y6272quUm@kB%fVq_#zRRp9P%@F|sBhsq@0YN|+iqt`g8iPm}dWR?= zz4vMe;Ls82%`z}_WCj?Adf&AN&$-|I=X;*}{QkN=Cb;%q>s@beYi%B1(NX95>hM<> zhVf`z`uQq`eUXo0yT9Md2~T9+#_fV1Ul7l0T-ytOeD~V?4&yIxUos|Q81HrTAIA%| z^qcVTtlPhh-1J;*-8`?m-oTu$yWPI!;&$uiuityza3$V!ah8!fC3Q;TdwVyx+ltcC z|9vmz;%X=TwJje%hJBA|{QT3kyYX|Qr0g4>=G*K2v<)LiMKz{kg3S4QuOp?q^6m>h zKhnlCbdks60C9a1pPKg|L4eV8Oejohp4}Xn(QTO}fH69LashPLJ zmxBIX{lj#R-YtTQi@*^DF@FSq&xD^NdJlX$+1bRFf8?Xm{)SDduwqaPbqYRIc zU1wtZu@Gf)kzHqpE6{~uZ3Qu$+3Lc6XY%}4vKUj(Lg2ZA7oB=}k+;5=(Z*@d)#(_p zYTqC;y!SdeCnv{Pnj;{K8pG4IKd~R1?|x`k`hV=ARb+wSVx3NYek$8^>1}d*dwa&{ z2P!2@!ELJRjlVxxLP~0~QQH3wXW&csO+OrYtNnh{T9z%{&(w*NZ865uH=3(A=UqB| z#@ELrB+Q1dB;&iBv@0zuW0aQFNIv2f?r2uqTACZJ^6b;9JSrd{z@mz(#c&DT{a%A! 
z%dA<9-KyPLQ@fJmGBHqK&FXrj8s1;C;9g|W@$}P6;ff%n zv`^YS6;?|W@uFy1oc<6omo2>YPFRQRJMU;&l~yN;Gq^2getuqr^7?AoLt190Ci#|= zlX=fLo6F_SmYXw%#KjusWLL_CiuHx6bXQ)scxF95zI|D}KDyZPNOd_SWAs8j(%`p$ z_^+3dl+?BvYNREG{h5|`4UfV^@_a^T3th%*Ge)aS1H%zJ1>HP|@#^Y5HO&cuHj7s) zs7ZB*L9LcsbmdQP1A(#qM9|Giu}UGb&q4B+C}w zBely(yqtm6OTjK}!kgW~6M~C()%~}bBsF2yEA_O>G;Umd0u(%HR^LXa`GtskH7kvU zCaq;8wP;z;$-bRTUtNopOhavVZN8m7oB^S^g%_i)v;@z|(qZRT>8@Q&lxpRJf@roi{|cyB4pi6N zni?D1ZU&`a9+R(QwQsx%*|-F}pqtjMm{xf&j%~}6g-P$uC)O25sK`rpdY{0EG)vv5 zjF%(9NgJMSq){U>xKJ9{o}zCV1u@M7rTI=u|cusp7Lrl_xRe8M&S{;`;jS-Wu{YM52u4+UqDf z@I{7e|B+X%7ORDhYwNoXVo@cfCiL;zVwKq&7u&4e;#MMHNe|}m3 z|1<|mWY#_8Kxt@h9ux`!t*u`GV=Divq?g=%k-jKuSMx1~C5M1o=1wgzxGDsWeSu-N zKh^Jg`$udg^~NKluU2yCb<0p@;0Omu<7-$i=9ooD^7D;fk^BaDJYWt?GrTc>iPGdj zXB~h6mS2UhF&8Jaa#JpZ0DEBPZ_!GhNX-S4JNwa`bu?#a=o=)b0Y9ytPbts3e`JpN zEH-u`XBt(vF`>LVU~Oz{9MdGpF|a4kT#{8o($R5#LVO`sWC1Rv>ww;uMu9KC-(J?; zc3WaBZgebC!5h8*?p$k*V9d{1okxUe>XzRl*HUF|cyX`V$9ud~Cl{Y#=aH`hPIwQd zP~W;f`38JNm4ld4e(i9&>Pm+4(mw}Sl%x%cmOrg6Mn=JX=1pU#yLo!<>`iH&0LS7h znifSS)sMdhob~m1jYyv!k)q40CnSwcDwlH>DrkvzzO{?B>#p0IOkJOwd_0dD%&Yj$ z4cE}mMQpE@OvInPGt*+3xU2J4fXBybPWuqAySA3o0~r~d*`i6lr-ipZK4NrT;5A?M zX?`l$3Qk+vKe2JY1dYgHmf+(&+OfSNZR(ZjOuz3}q`=iGBakepoX0U_J(e`6r?PwY zm@w;7?e=<|n&+4o?M7W0X?yF4^4*W`r9T8>3`ZyTZ*$n>-pJ&b-CQKqH4JkoPDc6> zqz9sWhj5jVikaE_ZhU)lT$jOWvHb|euS&!+%7cBrXY2@ak>v zcnNUQbU|k;b8-&4`~Vu9rh|_JPq51LD38y)yqfAc~lyyUC%L~rNN@ggx+P14CACAns5tj#ZE}rj%fN1;z1(tfXCES zQlTnlBcwaq5T3b(qG#*(8VKBkroCWy8HjDy*Q2y*^X}jrK!B8fbt<9uggAA2L&9;6 z3vikHy`=Yn@AYV9uMR0hwH^D}F@os_YLPCADfJy-hXdb?_lw9&D!zgMGT?EJL1PC2S;*19SxY=!?0Ze|X$q5#37QWLTW@9+ck;Cu9d zxPFiNG4T*`As*p?{?ikwXStAV-$5HV0IDg?P5dFaH0%JCxfdnJ2)fy#mI^Af5fGHj zk6Ll$KeHfieb3ZYvqga_Uj~Hc@#dWh!Sl#%M0Gw9Js27KhK;8>!ZRl)QknkgEUgp) ze%piyE(iaXUPhLVk6zJ)2qFc%8@Q9FXjW)Me)QA@MJM1E!l1lHi;!ogAw)lZ9+-t5 zU2YtjxsQ7~K|tr^ccAi?M~pOUgI%|__MuS9PYbx2I3gW#w8+n+M%Li2i@mKY3OKiO(BxhapE6_@^94wi%>)q= z5b%)`o3if{2ayb&M|9M9mpuOmXL>KfOaDV!E*2RV_cG9lbZ7bZZAZU2xI!uMt3HkW$|%{Zt@aWoL5LU1&Tfj!!) zS1(c0LqcTO(LWWfX#>=(bqSVgGPv0>kb>GGv-7k@4tfGeYHMSxMvhckA{UMCoYrR$ zB#$Cx6gcOTdOLxj9S;O|ok2=6B_7YIbGCG~i$xY+f}hI+x@~8AA88%b#6wP$2wI2y zm*-oYbFa|^XXF$<3>R5I;dwG0e*&&?+w%v7s+)WuvX8eA=(5tvKXdNT2b*#A9YoU> zz7wY;Hd1?VZYBmIN+qSktJiMh&hcYB%$fWf37PI1z%h7;cd1dIODdUj3;0FqYAEF zemvf@ZhaJ$K8y=$%NJPxh^pS9EG8)skEiP-RDN>w^Yc60y34AgW^!Z49v0;y1dV7N z39Trb@?B*5yNodsnN(uZx|#o6v%kdlM(1{XZ*Q-i-*%$Uf>#p_U`sQ{N!w>4SK}Wx zyvrTs4$v~aOn+aqPN(F#P*>WC+adgkgV_9AFP<(T#>X}bW8;dsBwd14&8J4`j=%pt zKen7EPtaJXoXs8aU!P~a=c_}>GShfm&125!sgR2I64AUrW7hkyR@HOy&7fUYo*Qo& zatu~akg@TgCuQ-j?e5}0n(R5BwsjdFv17lXdNgEfDr7Qmdo3?a#BzP8pB|CtOOPh9 zT9X*->&(T@mw{Z;TSN1A_tbBIgJCr{H!GAm;XyuHvtEHxlyiP@Y^S4OFho?$(PZHC!XKe{{d&7G~%2~#iaL#^#~wu1&Qd%N8sxJ z6Ftz3ltn?2^+-HdVy|d?KsFSX!m}DOnvhX)42%FfiNaA7s7I~;an`Ca%1G9b!2x3wJ0l~CH zFiQYt@H~})d%!7F*;IHePm)vL;~-^l5Ga1cX$g6Q3&H|wQPlEr%tAIDpo%c{fv_ed zND#%N?-Ys`6=kI+r*-K3RS=YtqY#>7t-J)TNZjN3m7JN~gVje!NQIC1-F5wQy>nb~ zwpY35u5w3@DJ3JAx?DlYyGk})J^qL(!kyaY{TR^q2J~}~bJ%8;%?q-OhdY z`*^1IP}1@3xgk{-^Ixq8u4*LK$yeO4q;RhuFPoFBxG~O}9i#49Ki+>q|LSj0*_Mj{ zdIopr9Vjk-QY^JPqZ^CbrpDu%ePv<6IW~Td@hkH-e&T~?Nj0AweSEi+C^79C`QEM! 
zI@84B(tau3rh~lKDEB7FfvuEeO^ae_g)5T2w9cro-TJKDPDtPG)Vj;DCI|0qeV|ZQ z;_Sk#$HzKI$}#aqq{&TZleWhEmHUfdnepB4=kBm%Nb+;&#)*NlyRnl>IP<;rD2Cf1 zs5JUPTwEMlNIZi@*Id_MQda|`LQbd4CkmE7oDHiddRxcq(a75n{nsn*0K_knU|3{0 zaJ|&?=C_6Wr9|Z=3G$K=BgJP~n~9BP8=Kle1fR&-b;g55U)s97r1sce;>^a`NeW|j zP3OsB-CO|*h1mJz+HjOY7-^i9wE3xrvc2{fxj!3azJ8F@)W<7ciKwpDv`{TCy<1eF zqVU1-+e+6{&IziXN6%>g&@;^_n4NCFlQ^WAI27-zTA<3>)H5CSa**-zp8cCfS#e#w zpUo#d_jTOdA*FIN*B6=)V2)%Stes5~eUwRmvS|Kk^MU!V5Q#g2wryk@R$52C#sVzB zktAD39@OM?Km9^_BP?OK=8RAX(Rp3z)a-N)<>)HkU%kO0mj2Djv1TYDf)%{YjJSGL zQm2DY>6$jvf-6$g&MH_=s*J`BhGZ1I0&zQ0>jWXNWj}km)l*sb(?iiKV&Ju8Aw))- zYU`tN4EzN)*`KWQaJB>fh0Gkix`DhK`lof6piz+pxV9pc?-8>yAm_XY#R>95*rb6q zO4%WE2~dzbOXBRidkZ2s?q2Fg9A?NlcU&h&P;FB$0p6j0J zJ`edCw5d7yAN(Z_y07t@^Gaax?5TZkH}<}J#SbE?!SQhPO0gNgdfH6>{9dff4m9dy zf9%lSqu9A|w2_&k!!bx}{7{3D=yc^fG*H0@?Vq`k%Ohq7ZT?GdrukA8ruXJz~RD|j$xeUZ(l`HG6L4Ja| z20LRO#O^6bBw=N+3?dEkJ8W`jEY-&AD8KWeqyK@M3i+LlJ+Sti3(hyk85`<832^1$ zEdwg#UuZrYL-^vmGI6Ll2Ys~X(JbUf11ju5g303ukp+GPnLQ&ACl2}l`Vq$&a+m;R zbpTSIbHeN=nKtp%aEKxS7CSqM8lwDg6B0R$n3tjXEvmtmBM{GO7U$|+IRVAbODXO+ z#?m+>B{x;8;P`vM8ZE>c=0QG9u+;`0Tr3D$PS+a`#D4^<`0r#Kk|iuXCw|6#=^$W` zh%hj)pRut)d_Yjoi33-r5QR8)@Qxf>_Vq4onT%lE7X;pi?avkY4WPA!l-Afy)G97iJ%v=JAWG-bfi0%U{&sfK(n->iD+`7+ zefcqBLlZa%-NQ&lBQo}64?9;$LR-iVQn1)(>LHU2&})KJ7F+%qOb-=LYd3Djs%>;` z&2`fMFs%MHwhXlLd4?9*rudZ3RokjVP}TP;*{3oF>>+liKC;uN{vZW zi~7(UVoCS0>xidh=%q_-nOHKnBX(h(9@l@njVrB1%+TIOf)Kwsg*u;36{hkBh97e@YJIVeWTNF0`R;bPBXif=rn|Px;nlMz29sRk@17Rr zgx>U#x2N&umr&v$#Eqy;22g40QX{KTSX~`&x!G&kj+(k!{!HizDtIqXeeUB)&MTNt zTmB1UG{{tqbFNg}l9+FAX?dR=wae_@e-clQS&C>QsG4i!b!Gl9XEs$~udX9wh`hN`UDgO@Zk_j9- z3LRNLnTP=QWk2eF0=6K) z1Q^_$p}0e@3p@!v!Tq4q@+!<}g2CaGGrUPQ?8ngf~lbcr*5EG=)t`V`7IJT%AX`jlL zI?ia#Ks?r1qTR)1^>MbLls7rzxKcjOIWEL}O_1a~0G-;y1|X$S5qc|R{%?4qh(Gx$ z;#Npl4`?-XZXil=VrGF-JfNaZ@VQx@OmYf}lk6Q6FTjybB`%J|zxTdl zXKZ|*ClZG#gYCw7P&VGgb;ogrf;Zwu@i-*^;R&1dcC9;H^$5Rxlp>$lV z#~q_KBGfyOVP}OqhvSGxHQoi(v__la>&Whckb)iHblE!9f?@2t03|4To1^HR&~=T{ zg%Z64v}HoqXDXr8!lr(^hvp@)=e?oe2_bvl$6FD=RA-aI){}54{@c)bN`lyRf&$oF&MDrA%U*c>VgT#v;%WY{lm4aNf-N z=UD5XJFDn!u4{c0JNIbE3k7;8TlvlCCABp0s>L&_X)1=Bp8wwis>S8kQa8^YSs1E; zE^*?ZSx4=IIC@fgv%;PC({Rk{-N`tdtTOp7rlzpabx>`5DZqVhpg?(T>KUJ>rQZjE z@mC?+pF(K7TW@*k2h^4hL~SUkhX~bt@;a!LpSvvmRRHT$#i@jHVAFmsuR3nO*IhIMPbH^+Bj-)p=EH9#{Jo56b|%Wy(yz0_ol zd`=~bM2gQcv`U*O(?q;Qsic8FQrQ0XLwD1ye(+rAWF(;D5GqViXDtP92`ELZt8p@Du+|}lyi8}n(aMOmiH|D0PC;BYdIBypg zz}C>yprLHL#_>wcK(WtlXDV(fhmneh|Kx;h1XoSHSTTM)>pFon?u2&26Wo)qM}e8t z3}`a6qBv-Xlr#Oz3e^GCU!M4AUnZp&ku(ZTBrM@Ikc8JF7Z4eGQcv0G96t`2Pkk?* zaj-`|QRImSJEkg*&(=J#Ll{ai-E^Q!3WC5n)$^ti8OI~EBMlFh=m>r)K27JM0A4mK z;e4{Y60xrzrx(utFt?|;55nK1L#GqifCKJGW=BZenW7j6TM<5pq|c#<`2`O5CzT4k z!V*Au5o0q&G#=s{zmLx*`g8T!k0R$`j`D;A8Sq!QTSwtak=Dfukj4F?D2^_)jEm0bW@OYrGM0=J9Tuy|kEa3$&{kob`jjqQ&2Uf@L^RH3(VftPGx-gk8dai1 zLNNo+3eaH|EC!`Hj0b*uR9{a$Pm>xP&`BJMMJG4qzaSZCXvPbmu2I(VQ~!e>gw-;*7Qo>v4&=+Lc81CHBQC5}IHM?ugDg~SBe z7kELk3+$m@o~=rTs7M}Q6rL>-Fx z@e<)S)UP3}Gr&iiQ`VqoP|K8^X$~j_-q}=&z0BCRKyQ92a4|@ z^x16A$p}Jmqc|QRX995pDUS(?m5NSR2n{PX%B65%myFJol6zt+Z5-p3D-=*sdAkOG z1X-#go=*26uc8CNQy=XkjW}J5O3LnsXtN;{*cF|uP{5wWku^WN+p+l6uscB7#35xi zl54bfB9AGWLW7uIjNDKOdd~pIfGa0^#HoJ@RV2S6rg>@4BPQW=b_hT$MXKt!1A=lJ zPkGo7kdFHBP3gQTB$CJ#=UpU}2vL<>stD5U+yP z@o~EEP%eOuS?ab{@;b}ldX|KQ1hxL&Zr14d=I9X>Z#R>&-B@{cnkMc6ieJCk)dyFh zl59^N648MBQd>W$trw}qsSA18>`c@VB6;^^!MTt3WvAahp0Cyo1FcfTm<1 zq{S$=$~gn6hz32fteQZAn`Ch!tgO1VEG_@V|KnbW6c08P1Ez!*=9*9x3vayT3KJo1 zG?U^j*Gnv0n^ji}Xd7cH%n`kdm%(j2)&t8=56~5r3f5#?(h3bOiTE^eEEWyib5AAx zg0=B>-pXy}<{Vb=auyFvqntkh2Nuis|F@#{O@^KVaq_Zr#RQxhBtgwen~Za*rPe*! 
zg#PW@mw`P-Ut#CSl)dlHLY$6RZ-Hy-vIMeh^^1(bK(g{rb_YtSr`) zx?1de()<^gt<7y!6P|wPMh29|Xx4jo2U&za67se`taccCTX}}2D|(3CQZcJ~U@VPc zR`L*$bUwaYyW1p=;3I{RhEh$|ED3jB9!Pt+`8sOwBodu9cX4ZRL!TeTjbgYPfh}Lu z8%FsTIw9^72y8))9@o*DZgv>rMlfB@L?E{zhg56q5blwOP_xAQ+2`Kn5NhdoHy{PX z)p!gP5HAt?P!J!3zS2q#by^!Y0TktGPJs!ajA96dEPx|wh_c|~AcRv>vX};bf-4D! z@>tDvWYkJH9%g%u@WiwD4}4x?<8zZs>)wI>gI4rv>gbO(CsM?2jQBSH-td&I;?}@u()FYTm zNIQrajWY;uB2*!UEaJX{J%Rh!ZyyTGk?2v3uaSA7DcAhDXvxA6gZ2R1Mt2@fa2TnW`^^N0zkPM=D6iApp)M6$>1(xn_PU3d_Ls2%5pGxYE^ zd@AZ_Or@qt;r`%T1Xc^uHJmOSO6pHLU7fGk@61orQP?%WVc^je_kB@#*=_$BnwP5# zyHv3n!_9>UXt9A5uYLqYJPQYC=j@P%*N0kxC?Ud=%t_SPUVi;R&T&}+bJgIP@!4w>h-3^?R7_x1Vjk6zvTRm)#zZVi=>J}>YrDju>J z`0*;?+#iQ?hK`SJd+4tec6(&~_*d1n(Q79jj1^!1t}0IMTxq`3gI7|&8@#%QeOdeG zp92HdA6m`Ei%8W(#&^SQZ9Yv5R_L|bkTs(NWEo^1U4&4XF9BLr|A*|GuqRJM^=}*m`$_;>;K_jb>uj?8@gWsx$w(nB zG{{a#?0bC!EM^4-Nbx=^Pe+B*V`Fz9TwTXE;8_V5p@g1d!sHil#EBUd*h(EegP1o4 z+G5pcqU85w3PR`&N_w)5%!A4Iq@q9!Z;*_#?E^=pATqu{nW0hlLvJ&{?+QAK$p4GS zJ?hDmDpU@1AV~ifz@e_P|2?Dgv>Z8x92{}EH+rYvn?oJsfYKF%;-8S&I-umypg1Mb z<{!j?omVdx$a;E|f$&zCJ%dsaG^6je5+rGFPzxj5UWUW*A3wA}3x5j>L=@g9AmI3e zGYIxfUePFARj^NgxzJbW%&gDM6g&2Ta_<2A?VY0!+@&u`64Yuloc`1ttsXfoAlQ^W2U>phW@wngQGiwEPiMWM{x z1BFG-QjuTJ?q zv5GXcX@~G@`VrfnFIskwXVBH9&oGj&?&AI5NE zjS2~a>XbyX%Mts@R|oX>ckt?2)ApMNld~5!0y_vk%x3-pO z;AXOKRFI%l7CARE^eVJKjKmtX6}nr{(z(5uk-eblzdBDys!RzLu1b4eaLF3OjF7*{ zfA8m}M@b@;wIqfbgnRc}(No4QqAk*P_dXo6xuEOlmoHkveg1+9fjROf)W=!VLeXU6 zWmK%!+eX{1DJ8u@#Xm=6{pzwkdHNhjFpLj*bc5>gjdSY$jNO9sLQVH8{GBMI>XKvI z?>2)s8hEodC_(xQ!#!;-D#L9~0$RXtGKujNHbu14yX={8`OoMGuxu*!;?~<{Mf#Am z$D}mruVaQx!uA5iR!-$i3~A++jnI~bjJ2K?bN>^#htz;+XPJjom58 z%#4q|JQjUv>`;=0&b8_JOMT49>{$)Ys<8bY* zJ~cLex{6^Ee_qwhHIFc>%A`K)Ppn-WxEsOu)PS`bN@YFEsb6`O)xOcMLZL287Wa9S z{n{67*ETY5?*?m26WkU0DvV2qLc6CWkERR96XxL=jNM@D@|BOc8O54A4R_wC# zW=|N8EBUo8ug&B(a=S=mXJ)$k{@~>&p>W1zqHBtO_8Dc?I)_iw(0c1x{F3stgq;v^ z;lrfbnj%@>Jg47{WLN0mJ?&~5&{!tItcUyfwc&zF9`T;rOCOqv&}$-Pfr{F zk-~x9{-)0@ifvbkwSIpYPgFHeb`E9I=R>Ka+ShFbkGV-5iBAobO-3WKr!@^cBeEBC z`k2Ru4MZI)4;zoFc(%>tzE;ewDpBol|=CZZzK`n0dyF3~fo)NgWttkvHVzfuG>_4R|+ zN@eOchwBpHV;)-E_=fG;`nv8gvrVQG^&eGctyQ;KowR<^WN+!5;29}4o7Q|vKjMk3 z={x&dnZ=Rqt=s>u3^jNYn`uI7rmZO%_nY7F(~U@35Sy*a%AUO``6#U{Ps!SJd)H=> zuYS89Ni@A~kx@r`rbiG>Cx?X*($gUH5k!xhw(9i^>*~1Fm0Oc3jA{=9cb#p8s;qrF z(vK!3A9W0r<)*KA-=x{(XJmdC(VRx^wvDln=1=gS2NkkzC%OnEIMj5Q94%O3$gSHS z*NZ8uiRob`d2LkBhv_@6+7i7@i!@_3V)bK2$GlZt6sl_4{LN%tw%=55YNbBc)IB!6 zaLHRhzcRhyw&%0hpjW{$)@f0}(@rwYIxdU1ifywar#nqss%eZqV$&*hq`5tt>3)qx zjilJ;wdx6O3Hyvzy7P_Hy^zWGXL^$4dDS*<3;E0y6us~D)m2{lc?>SUwX3=`@5`27 z589Qiill0kL>-+M`iH+yl;7gFPE-BfTW@3Sb<}f+%1v=OS|`+-!a@sdY^qyA3yKp% z4Eg!BEQEC}RE@P4UE1#}j@O-KdY0M-MZAtpI3wr2dTKPdukHCt4dr1XnJ0@Z&>xk( z7&GX5Tea-^+{fk1MKOc7A2m-(1t}{>`g+A;bJQiSZ{yE37VuB-mSQwdFI~55^VTaZ*=htrE8SzdU$( zbX(Y`dspA$=Gfu{ex5#H9dhIL?T`1H_wUOK2dDl{lrH;;Zm9oX`9^1gzRll%z4eJbQWkIq|5+-e$@c9fC?N=eqYd0)<0HQzqAz4^d4EjGgQw;R7mXx0x^dl3(^ z`b;3OpVOWjF`*}Io${KoiWts2m~V7{zbS21JT{(cX&4_q@ZgA|e8 z(?FkMhH=|A_g&obCPh3>Gd7Z7e+6oEQt6QZKfmtQJ6VpP2dKh_d6Sz0WbI8#Zq2TP z+~}AH!wz0PK~Q=qFj$follg6d(INH@$sV^Py;w#N=Bw2IjzA?e%zP8#A~wBv*<+F6 z{F}r+$U8wPuJ|PohVvvB0vqo`IS)M9Q)XQrDoVOG(e>i@M$;Yi77jEJ^do%zk^=tw zXl9E%oGUgsp=u<7u4@6TeJez~bh~+mZ@ig&uWD3HU@v{);p{}+7Rsgv!o~fl9ZvN&mxiCwq#N2^ zzZ1h=_)E^BB&(~JP&LXG)VhO;b8{bA-7?43^|geA6e^Qo)5?T~VDP`=zWVj=UqjIY zSGxbIx}HycSVEDC`o@)?4NYV2kTW%JRsW^|>~Qn*Gf>B&4C?fST?eC3^9REs|Cu+8 z+P5N7wfdEj*$zv-O$a6@ja@Aq_RBB-nN5O?$3A&_$fK|tm*@wLjZk!ha0m=NQ_af5 z6w7%T?w}yHaI|clJSl=M;=oM;TZDuX>vF0K>B_`F`R|R5|9uwv*p~i^gCi)2txQ@o z^NyO%LYf{`ya)Nv=@g(g(vcIqk>{tM7HfNbX_8F>W&V&9q*z{bxCG>aDp@qo$uPpK 
zH)?Lx`8P9kJOWa64Zcc$EJ>&u;R*^u?Gfmb6c8Pcb%uyY3(0Z^akD8X{WMiVP!Xgo z)BulA5es)>4`x+r`v0m&S@5&9`ut1|>Wqz1NByTy9vj6c;Zi7W7kwxYlS!@{leW6! z&t59mpDR}3eu3h=c=6R9D6BE;oGGBon%&et4kX+`&3xFwK&ed5)lHdICA`-t9u;&L zR@}SZCkjPdRkgrT8mbThUwusqY_X$-XLsg}cH|JoeU1$hpN}3(d5W_nd0v%XO4d(U za{etIoks&Sa~C^`uCvtOysA+=d>;Uh-u(Ua-NnR>>k+~U-xi!@E7F_4b$Q|3p`XcZ z6wl_Q!@JB{GOvYxp<~{<)g5#<1H)bf3z}*PDO|bX1}ZqorpjsVh%@yc8+w`A*ZI+G z`IRg!q)_Lx!bW}e{AU(?CjdOJV#@02|8Njp!@;mC;z|#-_QzjJ^s4~^q}T))sBV;! zOGbg^;Dqo`SFslnxfdyQ17?M-e)-X;-+*ByMF31!IFml}LDW@Sy?pB+{Ukzwuk$$}Ji6pLsAsZni(%tS*fz=y;89bhVgQv)f3ezZ3mw zFQGqAvclQDFjH&9WPwdcq~hV67=uM4J-VLVoTGuhg-U3(Z+44T&-K{}YyCvUfwl|M zqRpa{PyI|uI&mfLo3Z4a-4rWbik0W-frMp`Kg?1Stl+BD-gWeQ0o>Vix9U6#zr}y4 zbYJ)eLpyyjD^rY1-Sn5))Mu1dmup_r_7N)Tyt-==QXr;o*Vk-1%&V=Q zAG#!V%sR(Uk5&=o>v;^){XP#uR#ljO0%5l~C9~tQbmz9OzTxSv!OAyxtR}q-UyJ_H z4t-{D3g6wxPnXRSt0+EuO1seRhIp=I_fu0r+y z{xMb7(IHuUaAe4JFewWP#Ms#VCI={wDHT()Ol5^r#t8>&bhPyq6k_7zZ#5P!!baM% zc4h4(v+UtMCM|>HH%JP{8!_Nb3$9Muo_Csy8?CfTs>{p-$C^H=o|8eP+K30VkEYZ{v(HX#eel+Bsbp%nGINBOKJT;C$VXhCFQYvaaDKJ(CB2A- zkpHK5BX^nwS*To3Nl8&=QN!jBDc1i%RG`hl*q7C6*-R`Pp16wcLZa_= zyrxnfpWS9rbQ;WCpcJ@IF!Ya>@SFX>amV6_*+ zc$l>OOn4&3E|FsHIT|5S!RSz8)GYc*=T*}+OK%OH7Ra&inuyrP%PTz{&0&zq-Yv?f zW2*tK_xR45j<@jDeteL6M5_J|j|$q{{BRjf7d}+)u|GqfE9wLk@$DGsvWuM4PH;Bu$&LW1Dk08qh#IgKt|c4P<4%w_p*GK z-=s_urN4HyqJnug`>5zb5`5rRo!)+o z)2Jf8h$E7hb}L}wgXUQ~%OyO<1xz?KT0mPJGDbQ(E4t!{`&6ui9c%vYhs7_`)i$R` zd>QRt8%xc^R>?A(-c0?9`O;zfa*uAAk39r%lqnQb9P?@-JN;LSSR*vmsJc%5WmoE7^boqwMTsrl1RU&x(rY<-M z31a8`qR~O<@i;lB*kD5q`hsVjl7{Qs`g6)w2+M$m7uA0{r=(^yvSC!QuZQP@AMv XUYU_xPRw5rzcv1?^KcS9jGF5B$ac7YcEhxbAuRk~_l4}8EQAl4 zv$CO3r%_6`ZfJYN&kbS=23>t7H+p1bot&!H9dY>-^<&GU_Yb{$$8EKXY3K3O2a+rb zzlAJvDmzNmO}8Zk>KU}oc1`ETNW67bvv_TpR^Xv$u$ZmY-!hZYA-Vg;@8Ua$ z<)qrOD<;xwKc9JaWuhbYaCv_$U~^j>GU}_`c3oz%4VBQaAtE<9vqG85*?&L5#L6mT z?l8)Ob0~S6L>R@wucd2xvF(3Mb0AzAPKzSiK6-SPS$gH}d0t)vuf)M}cMDU~_hvnd zi;KkNaYDYy=f69=DiM)epK^MRg=QI+Ucpcn1To>M`uXN+GOM3LQgRnNukHHlJ{*t3 z+{;U1(R)%-E3<1khUk6%_PucFb~bU3-zG9!>%>W)MT{|SEQCH4qCQPVWRmmuTyEfX zrVP4I3e$fHJ58rVMM|q{8G0|(D&gYntHfu!vn5mJpC5GG1IC}aWKe9UhKqM@5$Dlp zH_X(_Rma)%6-Wt-3NIn_Wb=F#(~H zz8>EZaWvd%I+JNKn z?C&3n+*quZRf!a6*cjfTrw(=eg*dKyAC)}o{PlpCse{8Mr7NPM;mvVk#Vh#j-A_C` z>XO&zF<99k<>a}0D2ZDUs9!Io4S|g%|d|9S1$OM>~Q!#bNi}6U2%jo0G`%lV7-RaK1^Ago z7P>PJ^GM@->cL29fL5)~;3M}dKb%)8(tiOWQlurpeJCbCsO*BU&$Np3=UuF0JGj>5 z*nCif~*f;Vl}m^kP@oS@;m9Hv(3@3*Ul zE%Siv(T}Ib4x&(%Gyl_l^1RoVH=Br?syNftxnBKJCo@i;mA2F&gPQF>1CKxZ8Y5{P zd>lkl>(}lq{jlowxax+5!SXQf;$dG*PT5RGU~n*}XG@6hO7dp$*6Q%`gjjWHI)&46 zrVADQY36I8&0zp5Nj#~{ob_WNT=u=>JuJd%!&$v`J}rOmVsdJ}+A|g`xlj*1r{A`C zTq}{<8Jf7*o6l?SNicBgP!k)E$n`dJp0s8Rc)N?`CJJR4+Q21?7p&g0f8a*Dy+{*| zDO{=L^&a|P{ExdqOG7$Bta4gyJ;j)sQZXsL_=M^W44`&TIAfjO!W9k;wDSIGtxEn> zC`NPVj-nG_r;-#X*K)Clf!G@y?;OBvT61fEad|xdiO3; znD%=XF}rlSefM10gEL(Rt-G@ta_KX<3`gGl%(Z!=irSTHK3wIqK3h$n^rn}KB*?>RIEh!XZq*7I^aYPOleed>B!5ISFU@kRd2}te#$I*Racrd z@#bDRtQa(J`jWKw-3qJM$XF9vKT4A#A4wnW7 zjh)ZRZ)tqSEOWoHGP`Db(na8-O5R7#0Q0&)m0Y&?SpU0b&StQ>5_mPhk#i>7T8~92 z^N`<}Bc6*qr6ruvd&Lgl6%DXCvlHdVHmPDv)zr-~jEUgan=li` z=;xF#l!2fYC3F7krB7Ik^J!^~oWR>H!vfNy{a8Yhd5#Zr40u$7UFmqFq5Gh#BIjaXA%Ecm!9530)&Klyf*bq!`B`r@ z7E7hZ#>MZB^}32OFYvmjhO^-qb{n$>iLZ$xI==of=34O_l`tXPF!%KBsEkE(>{aot zO;WzyU|C>b;Mi)vJyA!`D59;c&1RtFvYxSzYtugpv*?YoZ>ZPwTx z`D|btDk+AuKVRHhhJ**$Sxx_<=rlAX{(rl;HVy*d$!j+CLzNg(uzSV1MEXh^X|=ez z@(E+yO?}dpOl+hdP>ouV4IQ`HW4v}pB^P_8JI5$4Gq*|tV5~ZeFgji*Q zZ{?$s(r0c9`ls^d0}$;^2Kht!CRjMQcvT63I?*6M5CUC{5Gca&0{`@S0WS$gOUv%? 
z+0pw5@nCM~j9q%uDz+Y-5L%L+0343ZL3c8|c%tU%4$o;7<5<0FYL{Ad5g4GODSJ%Gb77mQ_~nk*i@#7q@-YbR*2*KUa}rA8^^TRPXF)OegLyrxxP`owXxZ4Pt!ek>iU}mNp~dr z=}TN&pBT<VKgIQQW0l^o@|sO#mH@xwO9A@mb?7 zuRrc-=d$|uiXJtu_QnEKI?PTD4Go5v`9j2#C@eoYJ&p?bn4tWOnJCWKu zi>8#N=2j^Kb3YjS&3pL4QxI#Fz!~xNuaEOX6sp)5T(q05OaM@-@+X+2$NoEj@~|A& zG1$8e<(CREY9IXd=L(rZ;Z-`wDRqCDyZNe&44#|l>D#^Z0|2*5r++3};y|L0|926K zj~6hf0I}eE{P=NVBn>@AUIiRpmC{7Z?ZR`Mz$$K9q5S zG2JOrFu!YO5-ZTV#@U@Hz>NBLbkB5U<^xV>;e!P=g4mG9V;LZNs%Clt2G~#z^5)pL zk6UVh4rRjKD@Hjy+XB!o?YX_>?)HG&IFBv0wzghOe8em_3o>I>VQr)X3SpLy%`-~O`R%DLs6%Y+b5Z^%9-;_U4vEQ!{WEeW=z%yMFJVIwxZ#eyKDEoXfy z6?eFtUnHUuGUq5$M>$RpAKo^*P`=m#GOULwxoe_1j`Uru>N0k-4jXU0_QLr3!Ys&$ zo@0*XdM2*R)0?OSll6s^-i=BhHoUQ^hl!iP;=D9_k_gb$lwraq=O7pl7AQlMDhh|E z)U|7o)f?s2gk7wL4g)>S36f%@NuoX30~Mc&B)F5q_!JUR$desJszD*`5apzO#bwBw zfgsM~5qc8?&O}?vUQ~e+h`?7cZ~2L|uO7c&Jmh6jy=&^+62zsL+`F7)3O zX5I!3VE|eOv*rONM~lDQ{qo8cQ6l8uKLhvPM-^ySet@1-!vJfGjNDZ)-LW(L5IM6 zf^a^HYE9p-)9xtLPfK$cd}vv49npGK$^Z^4mEj*ysqGV z+k3)RFbSg4ZiDp+&U|HjP((i5zS>V&tnXaWMv|M60^_2fR?zgE)W=MQ;vhX^0owed zCDlLAyz&E37B2w=E;_*$8>R2_toIiplNL980m}4+7tyOac*Az)Hd5=W$N24%>pz8l zYKDkw1;tGT#MHGKX1khnXyQ1)!WdaO3MpD{|9QyS((6h&?`rseyBPKjLUYZaBmhU@Ch#9QzsP{sNcVJ4s+QR*+Dz#IqfBL?Gz z2AiL`-HbTidk_!>34+g|%3x9`2Slbyeze!!vVvi>)4&F9zyY~+AA+HHq3OZPR~be_ z=FL11a{d*=&tJod4y-bpyR2>pAqa|O;8u74Vu&OFMAGaGh(#t-0&2eS0x?y?5%?>z zjz5n)SUu&}`!b0*DjU0nfi1 z`{-kR{O2!-lR3gEf#E?_gF*$XfGM@XOelx5aJ~%Is|fmxLcK&FDtZNc0zBWD=d;zK zs;>orSv(#@n+!$b(rPP^B=ONEY zv+>3V&yKjE@Su}oMHo?(GQv?Eq->e*_n}%co#4 zdD~qa3}>3;m8;S^$YS@fU%>cHE+=l0uq_5d%aKQA>=%bBd%^pmb@KA^Z0zh-^wq_%hRVD(B0C1SXiFXF!X1^NEE9xfCzjO22E#$ytW+nN z<1r(P)~H@`vAm?TOylKJGy5E4OvH%FSuE4{x0W{ghH!F7(~V8twQH!A!yA>})Q-d! z3)zj{W49f@OagK+2wD{5p>^}^g04N>aq#%+n*F(;%t3n;q()p;DFOa{}xA&@s+LX)*XaW_V+04pf=BSlVp9(|DtyOWSQk*+8Qp{$Qj@r4{AYl##m)oM*F( zWo9>m6p{y%QLQ)rvkUaJuL@v4o-pEP25XPsHo!SKKEtZx=Q=Q(%X5pOLw%Z{O|uEX zCit#!hPbG7%-~TBBWy3h$KRqmaoh$r=RlVAh2WrgUt2ru8-nIUm~jrX><@Zh-M)xq zC4Jz~5;w8NSgWte2vi}DTt0|B@)n7LPw+m#G9W%HoDt-sqC+5n7?0ugpLSV_GUh%F z=T1g67j^xBq zhuI8_)>1HahhelP*y76EDx;3}ppw^)3cL6P&gL48%L55reBjCPW1GbMIfiLO;q(xGn z9~#OB98u50@9b z+Voe}^miA;Fg#F^mI1G=QrWe7Jx$#I<08z;2c7nSsUVd~2DA`&eGkxq`6W2WwJV~s z&wO@36EvB?1!d>9M?JV#_yXvL@q`{%4q_{jXdqc#ChJw`#eZ%UBaa~=;eOr3RwKFX z0-}YTa3uSC_XwY-<$I-<7<7*ZYTe^s%&ulXq{Bxbkw7#%thION9G|mx z4`S6a4WwY}+{$Z|A&h;bu(59wB^NX%D_}weRaWJIONW~0q~vh#LK|wz0a^g@ee-DW zT34o?1=y0P^R_2M3YIQbZ7jpqof6W-25qn#U^C~(!V+rAr0bEWQv5Rj5+bUK@lVNX zqal;9Z%|VshkLJ?ZrYn?<~jGNE_kD?5<@X3;-wXR)l^jr9vh;RuXM`tGrd$mH1TkE zBj-=H2<7qUx*&zo=eec-y(*nCBw8@?DXNa0gS}x?5*y7SiQhUU@b5gxw;}E2;<8dA zyZDb^q04l;%}|9nXe+RtIB_Srt$s8_7H>v~_H99>ykyCU8 z`))(Xw@r4#D-ysDV8MrKIsyJ~^d~>~vB6Si7DxvP_$6d44kY4Ppl?Hd>}jHRP>q9u zfd}B!V8tu4x`T?uf%^oTU+B$v`~!1 z4a^}qE^A?XA=UMZfmwzhl&VG7cfh#ciQM(rg>;TZU|pq`hXBt(cmw2&-v4a6UxpV8 zK0r2?kg&&%!NwoKC8qxymbXC}KvFr6Gz?63Lu67WnKw7z-2%l0m=XePj&{1XCL>|;vHm=_&Ng)tNm^boHT2A6uB2R(LiXGkZ|H*c zL%5RDfXTuNST%bC7Abycn8CaLH1m?`AhZ<%76Nwko`|wDHd+3Gx&-zAsd$NK={<=I zo5K3H?7_d>srBohk1)lclA@4d+FGW*38PSqvjDwJ?Z(rDZbzhks{|ePq)bMe65(Tv z_t7AC2EZiNnh%EqACa6Q4MBNU5R@aRq%=5kB_g2hzXgOnm!3>eg<+TFRyo*(k?gYT z&kzj8f$T%V5kWOWg&sDg1T~inC|ZOzVl7UXX~?@b_T)RP0#5_I*>Etd1}GM7K0!b7 z!ZAMTz~D$y1ahoA_qqo=D}O%R26m_P-vkwNCCoXzFpF>wL@mj0Fsyq8SPp+z7$CCk zhAs4YG;CrVg&}|-rZnR%AU0}B9|$M?@B>y?;pK$IYzeZ_(gSGP3gDd0EQkb?e?I0T z!oj``@dJYpB<+F11>U~h1+&l84LBAcuyD`yp!%PXdzb4@IK?bC!mQ9P_OQI_`|(BUI4E}i2p1eBa&GUnoAm6o4aikA4@<(bb#b6QVbOP*InQjkTOd};{<8}x38x+Ykwi(GCE1^T=95OD zh(rliUeBjmjJK#!u6gnSiBy3^zYD~l-9}@&GxK~qq!GVA1vs}~vfXv32A(kQU4P?(=htNwXX2=F%ja}?WK%^10&JaQ?T?u`Nl|>R)Dps+{MbO}!Ef}$N&Q%7)~XA$ 
zh=aESy*g~y6Q~w2-W=(<;%kY8&|eeS>9}o{i$21&oS|3Am#LfG2->=DG`xQuH4E8U zv!C*EJj(3Rj7inUb$-MUBJFa$J5FV{S&kWR24F*pJ`2vCwfyK%&?+zoJA0n92E(zQ zq=^V=PLxO|(}YmtuEOK`)l2xzFxJYPIY*GnzE6MfBL^ClyYVQ2c6gXoVJz4f!0}3S zce+g>&>}~!2T7J-Yq0__43>z2W^tvv876+(xzqOlB;D# ztV6s9ueT8K8VA&o4j2Y5IbY~xbZ}`y^l3G z+!6-&kvbCTF5rj=Ii5KDA1(d629J6s@it>KnAhA|g4L`wsvUabr@W|q#4R8%I-NUc z`tu4|Ep2J#>dBx)!>y8GNvq&wES}q^^!lU@og2G4C~R&qnkY3}NF=Y7E-W=s_=zN< z%NV6`#D`wDN!K!#?Xku#=Qn;D-bf!#+_+ljyEs)x^Qprwkc`~RLN^;I?4C>YE_P)f zWkJhHLW0jk4V+?e$5Px$#n_GF(lMOMa$&BIAhL^0o42r`9Fb(@|lQ2hTKrbe?PxFf92f&MNs%wtA`SxTojn!shC2 zR4V76eFRYPj&mJkq% zyMaibBThhOT3kiWuAq~1vE3vB&{9F;Re^viyJDY@_$jm>Xj3UWp2u`ekq zQRZM1%X8|jKaLxm;-Qf0&xMRtkXkS;lSIZwhU{`?M@!@Oq~a{Jm*Z|Kl~h_+b$9iG zk=d#oPhv@hEXCH|bg&p^sh^#$oquK32CzdGrnK)E0%r<;Wd^!ot29rz+Gp71noThx7f6jY7t0>4&up4dQl-kQwR)3XQnsvh zjL~Jkf9rmErgau@QeFtf7pc9hIZYM%&iTHhlW@4^ncT6XxhL!0-Og9j)C~4*LtO`K z52fB_vSp$x>|?ty;9}2yfd3!$x<)G=E^jkqU-z#rRaH9QfO^-Ud%9OnQ2bM8mf-Q*=LjG{LOsutDLPJd z<1|livw$2N+vb$$F+)Nc&Ox{4SsM(%Oj4%<{Cn#W&fYvVh}#Z^2W(RJ^A?N>up4P4kJ^ zoCb3`NqG+IWq;X@0!4sdYRMxT*Q(`jVsH^~B(#)fgTHY{lh@JuAz%G&`+}g4 z`c%G%&4wAejr5xVQ~om`e*y93!VEuoyDn@j(Lh6T+AjnD{e>|MfvNa&^ zXd9{tHsVDa#tG;+WxmqcPr~8S${M8$JU*qrvXqC`*VE@d!5I|*u}x9}L~rt{uwexG zq%26w(kZ!`4fB~B`6QF&Az_UO$OB+!B%_aGu z&5$~1t>`DfJ1yU-L>h*_rR$eEcR-yu)5+@*o1486o3ohIJnJvOQ~Y8;5$AWFZ?`|m z*<#D4+dePoV=rHXGTp-!iC)vRKw5NPM28cIEB?kqC}GFl?gtLBaB%P~bcw%+R9j z+g0*GTWhn;j3$1`3@TR@^eN?-48@Bi;u5{iLgodO`#yhC+o|J)&pJWzWK28@2nD#Q zj}rW7wZxz2uzClCG&l*fk`=(J<7C3;>EYfB_HJ9PfanD^a>iJ6@f%t(XUx|$>Fr3ZhI%md`cqf0^e5io zwKf_sM3`={ZXQ`~dCUWmQ5g!*8UQF0VInc%kMW=AG{dbOSj3_PL4D~|yWCYexg=}> zSW)ljO8mHUSSYLzP{H=UCdn{WmwPH)(ug!^3eR3OG(7ml*Lu961_y@pkaY>}0${prvU z=3W%4A5mdBG~!1NW{r|QfI>Ax%!o58TX|;){&6dTCR8`ApY&s)xwpW=nX|uAJY;_T zA)OvdH1DT#D562D+^lz#M_(q7n}82CASgzff^AxDKHXD1#8YDn`=Jg2MVEU80+!`? zp#YziCz(l)layb%hcdn-Eh}}ZXm#^aoSQ5@SGx2JmQ+bJ1^ZUqF_PCOe=BFF>DdYb z@vUkt!S)Cn2Riy`RSYSY8quXp|M_%Ul>NXxW>ivLQ~XxOEfZ)cSEB!B^RR? 
zHPkgG{55D7LgsrnS8~5P&f@E;sgFDIWVgKTO2GO3R!4 zsRar=*;xtU^s4Jy(~a1~-W~GUR7GW{>p#W8R&rtIwxPi>5G7l>MRW$`Tf7)mL@?o@Ss^aOTeA0RTm9c>CShd1uF3<|qE~;v zv6pLPh(PnA&p5M)xO7)LM%ww!22B$7>!x@>e79YG{o3=t=gR4 zTH!sGSq#{$k;wyn*vwCYEe|<^W6y36kz!{_UA)yXM7xS|Gm1W-MNbnNK3_zz3O>|d zu{x&*nN|aK{rf!7eHsttzE1X7tIP7BkzlD5BYpZ6QN?o6Vd>wsWxR$Hi^izAC!{x( zp2L0i70@O`S!T}s{xa}!#&{enZSB4sSGanOk{f zJn1pgaRmS_Ck)4Fhi;b|VE=FJ$GQ!&vMBk0lbWbjet?ik+QQihiD0fc=Z#{zl*R(h zQ7cD3T^k+}ZIy=)xL6B$D^46}ZdRtVIJ`|rHx_VRyl>r6|^wd zNBMHC`P|~e#g^1d=g6t?KOWbu_VucsI>6)U(iHe1tW!|)$Tl4A&YO~_Rf&_mviJps z(bFj9o%po!VPPGjGYy|sZF%?5Qp|8@yYL`cgWR5$JDnannL`lq#{{)BD;?c+6STMd z%oV|UQ8!11K9`egRxxCIG;N0(tAz}OWOcMOG4?dD3ChxKC!n3DMd@|tWYb>jwJlby z`X_GaW4zwKZ@vZG5S8tR0v*5S3iel$v2JVCJy_ZlTcy`h=cHGpEgMJEl~9m1<@k}Y z-Y&1@_`6z~RMW>_4eNZ;+%zOU;sLzduUifgr1nar@AwpP*2&Irix@XdH6*exL=44* zzdm>dP<=U(b5NkS`IVTvvd%ww$!Ux9h!$1vm6$~5lAaI0lgHfXYR%5z8NFb7g}R3KhiWO>JaHv3_(L0> zbNhd&XI5?tO>z&8Fw(b{_+P;iq;3S^whD=VXZ@ zS^!nnc-^P@SgznJi2-Zw%gq0^c(oC%#Ikcn$#L3(HP%CoVynb9_=X#*#nK$wDe27i zIVBh}wMzG)W*^R#K+Jrsbyp;zu z&E9+rOfeRxGO-hk+Nv=mjUJz2hupS0Qv0|R!)UK}8p$QEuY1x)xae<%3`LFox;`Um}u89XGp^L-C9UHFC|^kY~%6@D6*vG=5Z6B zwGscB`T4HgEo$y;dKzByN=#=@HQiNurB$B4C^0Usqp3-icAYbXpEJZ>w-k(iddp#0 zV=AT%pqs{)^Sr9P-nGzs_D&-a+4H=4Gl8y_dc~cGmw2799tN#l;F#IZ2QT<)_@W5Z zspOCX`(YWM(UZfi=~`(kzrQlifE$G(2lvZqL9V%T1;r*55hDWyW|ey5Pt?eJjAl!H{}Vnw2^XPH)v z$AcITjju(D(x2>lBRN|2b`vXuZ{U)a?K8A0qd=~e4>?yM~QDmuk^=ms>3$a=Mf8+S%P`K#PtIE-gWPkq*t5o zzt86GQ#+QrG9T&~G=84^dG4R@b>~HOqC?YOh<^$UY=31(KA%tKG%Ca!#@u?4&=#)Q z_Fl6sT*7K2l>05KL(tm#e$>eEiNv*T?f;2`KZm+$b zmz0$38XT-_&;exhl8VBSzV1Su{<6rXWHxs)Y1nzCNkbt#@Py95>(swG$Vt}kE|pqh zT_*lUSFfagCmrQ0kI1Yt{t~PwFDN^pd^?Qnmw7H$lkZpJq6T@UErGdna(yQXRYL_< z!o9cmCB;#D6KVR=OW%&>*&0++Fx7Q1Fn>Br4)BWOSGwP)l~aSx>fZ86N0U>$^eTxY zspcr>MxEux2E&@g`jYp>w3zm+-4aw655i|P z>T7Tw;DO@Q_AGE9Nq?BO;QmYq_)0&%*L)&b(d|@6{$zD7PHSj|0wWYb+9cck zlvug#0!m&Ulb4$#Cdj{db9XM&!Q0J&5N-;BM$pMxqxe(qB>5~3Y5Bf{QhhK_`Sc>7 z%B^z<^7UUIun;v4m5JXsLF4LO_w)U|M2Hn^-lYXbkqbKu;lg(GZugzP%sIh4Q>nt~ zza+dC2AH>dVqsRe4Q1|Az_wc&_`5irjYee(cV$Z2UH>q8Za+LiRKkKlP4<3^XL0pL zIL;wQRbJuBlurW@HCA8-%WHpb0|4>mDM8KB-wI!_QY5d`J3s9L_gifcAr06Tu9ihd z2~GUH?~}x14(hL>@|9x{E^EpTgAD3d!`}5itkd?N@k3p${3-y zm&f(?GlN%malu0#{$-C6yk-Q#;BnEcJ#ZjP{>aM`;a8t#-@2f5Pcmaq@o;9E0zPcE z^l2BcXsZB2S8NLh+<0yx>NaXL>W+&VK2UHFssYJKeJudZHniHk#Y+y&nrzwj;btsO zfX&1ji9?8x&Lzb9uoZJ4q~PP}5e~|%J<&(QEbY{N_~pcABsCT!_&mAz=hZ_FF-BJ~! 
zzCK%{QCIb1najY}nfzKArhHP;GZo~h5q#3-A98E(hK^gK*TN?A_H3Yt(@hLbNeozo zyT9+~_}qQ@%}mGd$Q@p<9|GEi(qq`V{cTq0vCUfxxRv_g$6oWl;>vJL9-2a6iZ_fY zF2-Hz8)6FvwwyP<`z?>WGR1Tw3b@fBWyIFD2Pm<3ciuPpr0(;u-$=rDwfukFWv`T3 zXdwN~{E4&IQ`O?UY;$OBDV2Ic~ zQuSMPj59x3;@Zk>4ld7Y$E3f21$=pq1Vwh7L<;`?_3Z);TnZkJzN+>{zQR-BYuu=t zFOd6!k5w(tJ4L;h-?7n^IMcOs_$55aEXr7%pz5|rGXd#}(da77!Y-Ww<<@cV=JVZ% z4_SjuZDIbtSM&PKEEusaj-U~`?A4m`ACLHBXs-r_qxUNM74qlHOJeNWmqHhJ8-Xuq z%2_hp#LeKr>jxt-&;Iql9UD`B7M&F6h zoPtWQh}()@SiUJ`7O9W4fV{vI3x!066V))H&HY2!k%!R}pJ#&Ir|~BjJD2j2SVwr9 zZ7#2QK|a6Xwn%R2JW}*f;+ou~xzJ8zfk}uA+Hv*LKzP=o!IWUj*J#K@=Ha*~%QH30 z6~CX~ca%)s@UKb&@yDcN1cZ(Yf_9Pf^}C7=7i!vL)bJ=|!CNk|$*5CuH0_xX0HD*C zplhL)_0EQW5o7Lt`;<1LV$KGSV)wB_mHEB@eDLB)W@A4ygCfp8$7(#A5BhlV5f^dr z(o;n2H>QF6#-UUH4t^Xf)G`&Jp~2AUX+-9XFYG4sEpwg0_%j?_4`EPSY9LeU0BvzK zD|e(gytx1U1oR8^b?nVjCz&ZHF!nVLIJ=yV?OBS}nYkC_-yq@~UVDvvd4)Li4;RK5 z#i=PJ996Fdc%E2KvT)2*2X%pXmNP&+?!)T`3VW`M#P3l7h~TK2ilbcPE`f^K>jX3vW0^2UL^R{kho|y0l}ZGls@)c_}k1Y_t~i+ z_o=mBhV#{=BC}aK0Cqg5y!g^3eCV8H@buu4NZBQl5@R}c#&pFNw1~Z5B?5m3Jagwi zt~3LRg}ZHp>=mvj7H!yhxG_J+-#6@UI zS=SsfXCh+Go_%HDoUq>kq)z$x&B4Be*C}eVYcDTT2SBNGhQb{Xg{#=8DbZdz?75H0 zD=(N7xtmX0&aug z-^YiDqv?K3Q&jF7!NcgUm{!5PujcSM^HUl6(n07;HGkO&ndx z+*-{5DtaT81WFGUOJf>izKV(a%Hnls-?BqyX|F`6$;I^K2}0@Z9)ZW{yhA$71$nOu z@)`oKyuwo*Q@wI{hqCuR1Y~C~8=~0Esejn#r}u3sKgSj1F6z?SLTEYM>Gv;My}Ojs zFk>xM{ZaN_bbHD=!OML!@H~?i;fv6@WW0y_)z;;&*)8||HACOy+MeU!|KH<6zrAx# zpDFDlL_6&91-30kG?4X9|Gbm^v5;?th0Xy*Zg%?lwx2XRO&>lu`l7+A=4y6B1(%E3 zvGcA^=y@`+>^fDNcXQ(Ps&D{hv#F}*l5cUN4-{rR)RV8#kxcyO+wtPj*NTr`>))$A z>sXs*Nxq)y!JS&dq1A*g3*s}co4&XkC3o^n4banlp!qiAkTPodaC$w7yC=*ry=pPt zYk_a0F#EEmyLRKZ6qlw89GUYfVE=RJi*M7=8d`Pfj-i;#!Ks%mPhE#cOM-zLk|T0= z8|)e<>>B4Qq*YWK3km#HZLXYOCWI%Du!5ipsfBzjO+KIe|% z)>Mtg3{!uw;Vk^>_s!QxBpsLi^tCB|U?&OHWj$t?DDvUm6Cij%uDapRsMfoV*a-c|4-*F`ahuayYTb zhQVx0a)_>mie|;{-f3w!T+9T5WgXLTBJk;eEgGKV{3%{jI!sko2H5H>R-uXy0P1;w znvz~HjPyvOZz`8_u%!BmCqAt-kIPhzWqgkT&y$P`YS*3{hTY!UtP`J}SJDgXx+XH7 zDcgUI>7lNk_Dpar)zMvh<~1AEK-Pl2FV3sn^w#Ts&2KiUr*(6C@&NW930<4?QtuU< zty~i6Ne5?A4(&z&VcV7OANbF^*1mtM-msWo=o>uyB-N0q`h9O+S`1FqAkthJUoDvF z)sner935q@Y)7No^e*xgS!Lsn6mgk%p~;U40&@|iz&t>*xZKfv!DQ8eL=gg|*Xs^gr(p_~NRQd)`lxJ) zfQd3F4hk&i9G~m0l#`n676-oML`-_}l^=L|zjl~KZ*TKogunA6a%pM#&)9LIS6pG% z_hUht3Vt;w9v{n3i57It9{3fT7$Z!X86;f{QcdiMdKv6HbOa=&y^QJvWvH~Altk0w z_4K{xbfr97mN%s8uw5tmr&@C7@4XXa`N`WUvuY7&K6oYp&qz~0CH5Nmk-;ArnFim3 z>0F)fhXCwXG~$UL6WS@4vFk<-uV>KY$(B;Qlm^bgcGi<0O|g1N#J#Cw_R&4)8<}9h zBl>{-PJ_W{fYGbatbamZozOq5qWi7>^@A%#p~tV%9ltH-Dhl>C%!rqJ<+$YzwhBGk z+{o;aS?dV7y4v`@NVaI4>P!0g&S!c?`FTE(KsMBW`4lcS|L(5)wU6#q|7NOAlBJKz zc7rdGo&cusJO8s3P1YH7m(p>oC!McE znp!=)C-0%n8 z=BsM4_D+m5JH|{|KIEXy8s>zvsLoeWZ246DlIhsGcwD-j zPI>W2{89(+(e$B?R<{wVG7)WQELq%YH&V%-_^Dv1mZ<-BqVFm(q5>DF6(0X?jD;ud zwAMvJX85>{GQG$Ur*~25MP9}77f%HK7GUm7?h}`Qqe( zfx|B1f<)t24tHXsK%7DDl+^HI!X5+uAOiCW`0mpz{Bt2?i{fr`Ki-Yz`jRufz_-hD z+@BD})I+Q$Zu*b49p^x?c!0%>HGU|&s!sl4aNV9ghY%LXufjxGx0PZO59kxX7Rlmm zmp%;-kp`@oqaKu)WpD(t1hZ;?{Ww!~+3qVw@&GY=UXnl*ras{4+JU#;Sld3mi|pyC zQpA5n;q|SQJKEoag@!_SQnZVkaI?DHe7%P+9?3*!F7y}DK2!{3DPvc2UDm&Kyq>GH zK3dgBtRr~5Ui~4lRT<#a9s{D4sa!LGJw{_@FksQz-b$}^T&{g=%(RsUAi>$G#I{}= zx31Z@`#$1*R`tQx(=g;AJys#Z*2f{zsl!{e;hvDq`Vd1heq>~Xx@1nSY2+mExjH;* zW@bel3VgNCSD`pSMcCJ%yMI`3H6Ub{QDa%tpivty(fA%wBgL$4P4>&RJ`MwpN}nfp zSp)ReVh)}47Dk<}__8S?A&PaT_VrYyYRIhLI(P0II~!Zu&!0(n@8yQfmwLF)-rn?u zg$1g$$QOQ)NH{qZxwJ+SqQ+H2_JB$DpHd=REQ$5Nn9qM-8(1|9oEW$PAc6}r*`LmT zsXUYg5v*}Fz;(_KCu%)OJ>_yyaPfOMxAooQ#UE5i zQO_XKc^`ax09JtA=Ui7V>j+`9+Ix$i37HvD{X4D{wf(Txx)IgBycw~#SOuq$3QMOy 
zEp#=(s@bHuq3l@fQUet7PNVK3-7ct7)vu7vjmuHXmp?cl^Bbu;oTOOfo}>FjpkSAk}9os}hCZcQ!#yB(OCvh`Kn}Hm;X8F425> z-8;W8@p5D7v$9LhRZq@S!J-Ms8^+{zokq$k{ZS9$zC|oj3R<$ICxDn^tR zYDJ;VuYL(CkbNvVdvJPXr!x6n>(Q))qAYcb}l-lN?w{B{U!vK zeQmn?*sycSlb46jdp(}15>NT0cOGbx=YXqn4x46M!?csH6cU?P8RBP-PCW)oRUB7o zzU)Y;2@OnDC#GN5IS9so%$EFl;fzxIg`=S7*y@6ePf#AB@6t~fYEE23{A~Cwr+!h} zcu#bb&$+qg6@r$kMOw=)*f}XtK-6g#b4N8@l1qDGJZ(%o0<2q$`w2hhqru}`)hiku zyRW|qy0B>5u@`X&35G+Y1k0Rvchh{~ZVXoG+UvxfJW%^?LKZY+lnK32mt$RDuFx4C z#OTDP)b4h_DB%$>VQdrJFwlXKI6nV+s-2)Z;afRRJTVSH%mVRdq!_jI1fZugl5o_? z{QTcSW@_KBv6qNs6LQrWT=(kdq|GO2rj#8SiSSLX`JppVdM$k0)~rV-+D#@=}sUk1AlMBl9|dj1{#@WcqY zNKEpy#$~_+;igwJqq+AYI5{NaiW@p5)E$YhU{leh)vQL+h_Upim^J>xLmxsfLez=f zhqTg|mIlA(iR@@hU|3*_LwZ?CLr-0O50Iue>RPPxZdb46o3O<9vu5;o|NbQ4ddf$; zv?npb0Q90-VjmgZGh20g64e z{i}UCdkexJ3fTr0oFAQYy&}% z6CWF@7@ji*!h?5PA9qB7{Ui0(tM-|f+};tOTAQ2PN9VDQij68o5Kw6%Rk{rj5RejjkzPaS2nf+pLHf|8E1dwLhfY*R z5F&&Y5CTLx1nE6M@}7jb_bs3A$9FAO7D-Od|CHU%-oLZA#0mN%l63tjf7=QHf|I2Y z9M;@PGb2UMqw*>cK@;fT2L9pmLUg9QN+&qeO4){lro)I-r8Qfvr8c&-?m=?@0GLE5uiA~L0G)7uo~~rjObg8q))ZP(;i*<{S-~NIe>?@~#(Y1Q z*^~a^j^3$ZAd$M%B6^GTv9 z_qUvM1`F(XW0|1leQT$0jb=T(ZtP2a>1QxguMWmv1K5St;n~0I^Ydsd4%Qxd(ej0R z$j9qsB^*9pr&oW4NRx!D_u>Xe_nJJD&jL!T=1J;Jx`5z& zIQ!)|xvhVgZ&6@n&_V>b;SQsVs2)sxAO*;C)Bh zc0q)CJ$7=|&&tebH(vyUQn$4x)#p93DHDxctr|%i6X+NmcA`NFu9xu8qna0`Vp~eX zR}^zc!&VBXP@!)CO#?kFyK$3Q|HG}@u~8??wfcv%!WMoPm`e8XA4{4GpkRxZrEyX`!AA&2$q*82OeedwPTl!CjzD<;U103k}m`Y`kzI} z3!lknimr+M)72!d;^|h6Em6u-!LZ&@txZhV<{F_;)I!&zq2vwq_w0G0i;$iY`W_kl z-EgRps(yf)wUk9=wo^TNW=cAv38cnm)2v{)S>AAXWb?(d1L!0>rmQkc)8Ot(=HsVq zSk+9J;syeVB?JBWY?(rDALD&9Z%UpNcR8VmjZS_mg|`_hdM9V$gXq{78BT88-B?WC zEZh39iQeoJUojtA(VUtyB8tcjttyC8YVszc@5Y6hM2xsNJ<;E{V5^$UdY!l8B1vI& zbgj)Tt?8@Dt1)e3D2p`S+W7F{!!xNg^Ra_ipACvBUH13fklt*m=ru$AR@9rezi z^PN9foAbS>$R1qpfjM0tc+q^<$e8jm_cg5%AF$d@d|fsVR_^- z*3si*l{(7|;;{IXtWi&3V6EbAM2sY*EfPIWxJL77{7rYe)`dq0a^eqmFr}0GWr7}r zSRhcKbI)~KsT;zJZW!0oBkIkW>=@PfyTPJWx85SRu8J9+$#?#O=ie|>^<+<(ujx7= zwf==kGl|&$Pt+6}Tbw>QlJa~uJvObMmd|^9 zDR`3$wJ$yr->j1?Vhj{W83=RyO2c-YasIfa|6UH}mfcDG?cs7myi@Tj6s>%7e{^A2* zbcSpTR(lD=EsD6!=}$u+6}Gt}Id8$bL-Ru_-UP!-+HCZeYV0Cyjm2UMZB^}fbXeth zl(WTRpKRR|P|mKjR>PRL`cqp-9=w&Skx%sZ;&CpNe>{8?))@vlq&UB~c`r{IWWeG< zdB3T4hyqQ;c^f}?CwgSu$%hkHo)$o&{dNiKI4W8Tw#NRlqZiXkvJ32 z-%2<8%4-gPeYaTWTn{}t`0!T$3p0kor?&zMh8Gmg_b?RZ#5*rP{_I)y8LPSj+Z1+q z=D^<#clR;Ua7x(tLN(@}8iUBwOc-!kmptaT^XOfPhh<_(s&<_0;Q)6C~4 z?u?A!fH%dhCghMn?^$h;!h28y`7RK^d4)UM03;O%k%^&8r(}&p8z1ES$LKY7c_k_Ae^V4h__oZ;tn@Gqfh?tF)b_rZ zN>%qqE$vLgug>zMCtnS=(IVzDYQXwjr7Mk(eBb=WM2f3a>xYJ%MKr&kb@9dYe$K>1u8_P_SjiW9?uK(L|7xku6wtvwgYIU## zb9ut}c*(Q*Yse%At&@p3k))`C7K$eM2;3Ebb`l;8iPPINlxy#ZP#I~yik7g!s12w!MUjvr&Fy*cK z_^a;9%T|uIZc7On&uOj1k6OJO0PCKQDwH6PBbR`q%Rc=QUHVRVrPew zeI>3^0gi6zyo;-#F_21cGHE z$OWY`U-tN3?gCf>Y`td5#bULU1$|zpMKOSU`6zruRj}NUm1OZ|vgp@3*gzUv!awS7 zt!S}PNB9wwT&u{~xAk!tGK+Hevws%H{rPN4ORIu_*=;9^4T#a+Z#)Bi1-*53a|$@e zN2(lRLPA5gT9po3f#2^;1D(W9+Zd~JkmEdEN&qvYJv07;jqmf>#7t9J#)WbkeT_UT z1Yj(3r(sL>4iRB_Q#*5jmiL_`Xt}3O13-cR7cT?xj$eQoIG&b`*jtV`=8NTuUjbMZ z{v_CdU%6_sw$@!h{3A~bkC!~=`P9yZFl%E`>jSx6b7B<9w|*f3Y4oUD%WZd%7NC@A zeCVZR97|W=8}fTjqfh-n-cLR@1}sEl;h&EKWugEUjKhIeBLHCWlE@jvJ&wblF^4~) zWw@>`Cwu#RiU3nY8XbR>+WS@XD>F(q(p@_yVJAcaT35m@cqv33hq|CFNAm|R@n7~n zX1H+;vvy6^Yc{fFuM2Kb(^X;9&cuP(Dz?Of6wwvEd1`U|H*ZSM0Fz)SI;$5lI5{dW zv{R26Oq8}rXbxHlhl#Dh;%+v3P&k&W-C90=enXaNtQNEl^atKe&nT<6 z`&DNW@!%xOJ>caPnr(i`Ttl_;QMCV0E=Tnp1(z z+ZgPgQAyQv;0eeB~Pg07Kv1Ac|7hFY3fFFG+jC=9uH;lhPV zc7UydNGbo$Z>|g^C&=xkPc~i|Z{u?SO(;s=Ayl21KCx=MVWc|pWbOADAKpRQ_VP`XCGRbfK 
zrB??3jy9RS#d3LcpwjAX(R&^K8GvVSO9iG#pwgO=1*XMPgAufbSNd{S)%Mq%xJw6qtZ*83A2B0Ww0kqFN^|*Xb2Q0`E^aY zfF=cm-_xK2kizf_OR*Vam98ukC~qIWueX7wtpd{2^DHWV+(8luT}isoy^sIu{FZ1c z5xIIFeoF9?)a&SLm(^imM{;|Ht`8vRb!iNdWw7=}Z_5if{KdOJDn94g=~)Cc{g0`* zCzifNm(S4cqw7zBH{5P#{3FmFiFI2S)cRT?X?)Q3lSqEm|e(*VzdEndv5hIH*I(qrj?KNB9co*kGfnsVvzp^2~0$qgGI>g6z)p2YzB2 zbmdDX&D&&l^JoTt*hGQ>x=_ARk(-E}2ws>?l_O*MsC5+?Fk>;XEp$?aZHFU3bYnOUGh4QnE!E=s*62?T-1l8Xk1LE20wV$@ z$BrSyy0~ljJ`D`MMWv_HtOh2!sS6Q`1)N)@O-%{sO4pqnlpDAt*_prX+qE z>Zso=Rr&vQ(E^h6{=zs7siFvfJc;uwQHPm*c{(= zYL=$Fwy1M|$XCEiFd9i|QLD9w_riiGJ*^xH1jY&}k>@T89fMMnMl!7|J|&IKqTjJnXPYPFp~l+Pm*FU*yc9wM=f)f zd;4K}DUnwzq@)JIjV>*9kC%)Mr)~N9D)Fmz8lIIEU(;zW7~BzQ%BAj(BTP1AI-X=x z0Qg*8B@X{eaN?9juWf6MNn--QIR)q$+g))|E^()*_X5Sd(cV6N)qL24#_Uyp&LLoc zi(b7_J_U~*GvrXI+tC{9b=W*0h80c*1O(~^EL0XGx0yA+9PXzEjl?dhshKuZ;pS1n zIgO_x!E%iZ=bg9I5oJ}I-+mp`5cJ&g(5&OOpkcS5Wrv{U?OT?&h1iq-?1a`*wwgR= z9yX69jy^tAq;gW57V9I%hrZv+m^Xln6}$4}wCEc&heZa&vu@>Ak~GQf!?yPQfiJfh z3M9!(8mTY*R1RAgG=t!f&s%ge7E7Po8qtSXVYYLnmUUmk&Oa;jE*zSl+a@an z&SEnyxOO)CJ`^O21d@w&45=hxs!X#7zO|%r|IocyslMWM67RR)g#eu7WS)ipQvs*f zd)ui+zHIFvfq(r98%~>~?jx6()iYeA(;TN%w04lojx+W~Xg6sn^t!-^~9^z2Ml2|kab zTD;%%Cc7Ek^OIRwn85~r-0HK_rq03)r_|_0nOd>kj^2oa)e)Sefw&p0&wn+DtmSji9_^(x&TB{V1b?Dg_)I(MEyG2MbltD2iPkGXB#sQ{Ujo?#-`jnDIi3ZXmXe=M)DV61YfCCI@SPY1 zT>tkH((6GDn@mcr4nk2_t+OqU!@MtAD~H2eMNT#^t>NjG5$S^eXZ&}uq`ojtfCU^$ z9M(}%x_kGNtCXqIw!F4@-mo(RO0DavgM?w~keUBDQR=s4N7T@cgN7IOuQE1*(Uz!7 zg!o4)dOe=pjM-&K9g{5Wb_|uZE_%0G+_3aMpVVIDs4FEXbNv4Nk)SrTiqu(OJxtvn zM2Wt}O48!>|lB z&c_TN{q)f*NQjo!{tUH=)X{D-RakQyOK(|7T^5$jv*R8zb8<6=NM&U7jZ1I4P!9bW zOS8`!Jh;MN5L0?4u_Qa(S53LNP&ddlA&hPKj7IF+Qb&2Dky!p?F=X`PPKM9c9HjTY zg*F}k>}c%V_>UFDee7u9#ZNdJw5!@NHM3yE*&d3ed1ko&ye0{;MJMlWV)PGnFkZ@R z6PG(9+i_Ip2QK%$*Gaw4z(84jR1aOo?iLkH)ZGv;z9wIPpb}2F`z`+N7Qc8gI<4_; znIQZr_I3k4`9{G!QrBHy#9;dBflb-8S~n>x)MYfF2U;G^Chad{uZ8lMVKD?RYy}NI zGpyQeQk6neaF|9C%$9%_BIxp3W6>~Wo=eI+=+u{2nw9~i}1H^=jZg!N1&!f{A7l`?z z9doF_S2b){7MP}*sh*1E&&q>_Ie#|n{_DZ-B@(i=7u`9iKk?ua*3&%_P8#{n1Q}wI zKk>8BA<@NLr>KfddjG`t{hOHXPr0(8af{B2vYA|ti2wunCDC-f6=edMHF#96-m6n@ zrjs{WXjb&SNMFCG&XZj=dg^;=9ZdyKb@&{-`r*Zg+u~J?yV;RA&BluR?kNmjHiYUk z`LtD%K+Z#GF^X*BKR>e65$Mruv?GS0pYr|^{0`DZ>-mQ@!yw_kHY>8DR4!Kk8+tf4b zJjb+L+JdPrJX9&YNAl04I;bGw3e<;(pG`f@*ORh}CCe<0FfMx|@?{m@O<>~>GqwWc z{rft%B-33=(3BME;Bmnq24~`8NE#akA&4k*k(Oe&%sW&m6N#8P5;#y68(?X}tyrvh z*}T=_4jRz8xx%+|a{dsk^-$2RvUAGk)lAM!shoosxA*gDi_eg1;t(*QI~gh4Oy+{J zf=U>Er+urzhvjwytj4G7Ws5&0L!+HO64uI8q40jFg4R#nrZqd5+dcbX6$91Bz0sP1 zSY>&f9zcFhC(YDF3%DuU6=a7m7EbC<>mR`Cbz`UQ;WwfM62r~fq2D|X_L1DFQ+1wZ zh(%fa#taEP*;%Cj1|7~-?H_%!cCyop7q@l*lXMqH#hP{a@>F8cuDH0R;jQEu<4Po9 zL{h_Gs|gCP0+6RB;SiH-f9Q>S{`uVY;+Y1Kr@Pjx79%bjI@=kzS|?>D+cqU4qRBJ6 zLuObgxxhMN_V&4JbSwY)d2sx3+1{ak zg`QNwz*Ipj0-8JAHdQHMxEAAp?E?&J=zHk z=^4;rv(t6aX1G#JddOj$4ZOFek{x{Q1@%50cMk5E?I^zInZeiW!)B2=N1a@%&zw#n z`D@3;BIo3B@n(9M_80h%UrH$fmWSV(*O*^G5glcIl}@{P#P0(uu|uoyxS@+IFM=I^ zcf=bK8}MFuSgTJhdTW$#M<~5`YzhNg;-Ypjtq4<8&r=uiXSb7-awuD@QwWZ&ZjG9< zPGV{Z>S@%FbM1!^lzRTQJwA1j%e+-iI@5G_mu=bU04=1jBKcB@WcgQ7a7=Nne-v>p zWXg`XKcbM&ZE6xk9B4oYlf8O&9SSB)rZ5{(dxK{Ku`x+RLn`@NOBN+6qOmAtWDZKb z&%~+U=&%;(FLo(spWITed*Txv6bdQfZty7pryab*RYHp@KU;E`irt89+%oX8WF?Rn zZ>%YrUNkU$*W;luBM`g`{j=E~n|U^xNGP6vqC7-QVlQj43`}H;F^Bio7xjHT7<-t~em&|U}T^jhtd;R^xf8M=dzB0x0V*gXy2Ms^G z5OQ1tv!L}<;*c|b?zPsK7Bq-RcujUk#-*UuGT!~D*0k@-T`nl@v-QE-`i$B7)bw;2 zYR>lAqDX^XP|aDAEy}qPsctX?{*@{gGf^_US&Wn_M)G?>)1W?S!@$|4mNwkbg%mJ` zN~UNIxuMnVi}}aKUcu~wLhgm7dZGE6z&S?lqn5}#d@R6b{goc^W>hlU`x_3>|dNl z5ZALh5N6VeUQ{|X|K1xU(z63bO(c$w#4Bx6iH@y5T#-<(ppTM@=5E0YODX2~m|zyn 
z=Q34>z#?St>Ci7;M#z8xDf}%_7Nxdn<etd$6ELBSrDU{n8#tTDML-7pW3oQe7z-L`50EI!l^syh@B z4aJYa8<3=B8OtJ-{*E4a(cYK$g;rUA_tqnO*LN3M*;Dn@@nrsp0TS6W* z+Pj*HqhYK2j{C&Uk7Ey&i02AAzPg)^!B;3uBg+Sxgr(Y^mD<0Zf4bFao7x3ik9~cP zL3h3+O?b@e*?n5-mQxxR@tGy$rkH0?n<%Y;TJrf%Y+`U3kG+)DjQ?v0p2?L_UHnYz|txH9V zZu+JQ;!;Yf`I!qM>x(IFm=qMO*%?JZ*ym$*xm*lmI1uvF#Yhyak3)_X%^LD<>&5Bb z(Blz<_(T7;ZW)Sf$i~)ujaEz2n$%QH3_iZW;;QvIoI-{C3r*O_kAm*7{LV0RcNo7+ zuQzk~hGya(C40xV-*=-G#QpOQWz7%Zo|jO0=FwCS6s}aW*v~5H`w?0WXDdoQ>3QX_LP^9%S@|zLov}c zztLfPP-vng7T;5bvE?7G+fGglxQPp+IYcOLo!xu}%@71)@F?2&$?TjePLMS^gw1Sb zr}mSOiPCm*q}ziOv#h71sfp)QoV>lRuD$idrGYBw*&zxG^JzUC*{ z`T0k2vN_KjE`AuWndtH)+$0PJG3)Q-U{C(hdD6;Ep&mtq52LSOHBLP9@tK8MMgO{v(X(nY0Py7a|?v>Qgp+xzD6?K@p4N#0@Sx(y?DpNlvue$e^kU-Sq|e=tx)N zkZ|Cn0H6L3y_G%H9>ax;*H%oKYLC%wyb9r;&pFnai#T~}Na<4yY6J}{F#l(m3~ttY zfy?X;RLWuf5}IIeZ~zJ@x`=>0hal789b-)JVR#nTtC@xW5qis1VwVlH8)!2 z$mG%On3pF9GZg~Y!A1I~jpt~cQTPi22ltf3K9kqlxi$t(;ULwuFfA2hJ`FMn*sk}4 z^wHBJLP5Pw@X0d?x@`_ozCy!JK%P|(J2AU+%LE_100{2bt;Hb+D=TiC{Kk)JjpHA8 zKrIMC;PiI{ITV)IWxX9oW*|L;TDYY}z(O9ooG-n)ADDx}QXJ&TIvB#-LP1t{>L<@)n)TKaOfyg$rVyYphk%dH+|f^67kh{LV^Tdcpb-9-*JXa(m$ z#aG3^?cW_BlXe;r)vFT&GyFGNkzu4-|#YHOf$;4k&0bMC$O zk`>urZLE}S;@~ZN_HLk*hMDiMogh@?v+|VL{+!>q|4k5fL7f_k{K2WV&m>>;lj8cxgv{qWPOI_7Jpg?z$;OCgmY!6{=Y>o02$kst}k z9I<{Ihf~-wQv{WW<-sZm8vpw&7y%8+U0JA^579`+pmJL8ba{8*r$?c*I%hw+ePbAk zkpi^B9<+s7BGfv7Dkt<>Ypo1Q)7^3_>7F$Z=H&jZUbRY)a`WZ(u)}b@7s!M|UUW)2 z>#ieXPCWouNM>lY;wD-{t3VsDMhto=reBjD4U|2wdmW%SZzWi~A2{3`D7l*AT*^5a ze1V_KWDpIDvjxQ_{U_cr=Rdmy`2!1{vJUnF1!8cUNsC#vVB%m%+Bv+C%9qq8Fcnt< z8pg&a+;O_3s@kbP|0SqQoRXEL-4|3IdV#~U@%sH^|HTsvSp!Ocf|=4PSJLvN-@zPs z#!c4@Spd>E4Pk(vdLHn9HZ@$@ZDu?NGU*F~!Y3*hxD?X`%bLVMd1EZdRGP@Ucd&um z`1OWiwje9G6V%$>07PI9zSTpq{F?j}bMjU^h4tf8=7&_Fe}#=3066`R37!9zU$4H( zgZ-@sBwGdQu#N^Smez0fX?V4B_}KzU(=T!!@?3!C3qB$Ha@QSHq0VUM3=}GBAv=Sh zPAVuYCk+C9>Ap??XXNHP0`#prG7CYG+23Bmf(oPYytHDiXc%FgvcED1>I|lO`hb?$ z(;#wv95!0#!D7MJL91ZHy9YQUA9@+`nU425>-~+8cin~h{cijR*XQuS zS)RSm-fPX8dFP#XcCfsh1TrE%A_N2kvXrE#A_N2!B?JWI8T>Oag79PW5&VL*SCkNj zC?6r*25&@-)ul|{zJ;I#f5SsS2AV^_JbeWG;Da9s2c9VmqRfK+ z?{CPnrw=y1s}+TS5QLBt6;gJB+)IabQkK98Xm=78wm~{L(40?SmCJ5;)2)pk5(kwp z7_C^!AQsw;MT14h__6^evEkwJ{R49I%v&!b&Y1$$)6MUW|G-VEy=JT z{b3}%{=J60yvp!?3u6DzfTSRWV5c7m3hJ5d>=oXBM`JJ(K)se7GNCSn|95m01Tv<_v;XIk6Lpjx_zQ%F{n)m zuYlm|a0K=cVZGDMVW+d-==1aQIqT{VA4Ns|W)TPo2nG+su+Id0L|o~8e0|ZeutYn% z5PK4|D+$45(wmlIlog~;*{C@QKD86MJ5&^IZ>h0 zY7w8=+=p`F12Yr_;VwI5BVUMt5G8nrj@J44Blo2XwI_Od-$u{Tf_MV+d`dIo`C0g7;7ZwALr`zuZ~uDhR^qAFc~y!gWT>f zmppl{TV6scp+!-Azw-~(|7jh#tZm98+;;gbI4pe3Vf<_1LM6iY&d#@E^t+nJC#T-h4UG@LBe_&xWPiBY$WFqvr(uSB&> zxJGmO}&ch`>iF!i7MDBUMEE8tT*J0ye^>(S`Vx$ze*Ef zBuVk1&RBw%JESVIdr^8Dc5$b#|68^=@?2!N9XA1=tqCE{T52K z$#BTQLUT#G{QmxR)i`7qK`dHSe#*j|`jrF#%70YjFR_F96V z>`vvPO=Neuex+NfCsfzze7fl$8Hq~HXV?=KjYjT1mLrzL=UUIxoYRkHxUtykDw)B{ zotBpNqs`rQD2YX+_vNSl7z$i=i?~hnbN9`mh>%(HzLV1YX+9`8oABg< zd-)Q|hQ0EW(<3i1ULUV1X*@(=IxXl~ky0($^@{-~8-s8-I5@mcM}qt7HKwEEGZF6_ z?c5c=$RyAQoBt~Kwy;2?m`D%3KU*iWH4{gpB-|!htD9gI_xn>6=doPJ%nV-HT&?fv zMgPsmK$wn?m#;sWFKG%y(g5?L1U{%M&k0jvgY}JKjwnK@)>jmLef=ahQ3;J`Hk}9C{AZr`m2VJJa9^AI?T>sJ zoOCN0qqqCNQ|@ms)e=4I*89ANlG#KO{GM}lS6G*2Ez7E`<4ghaV4r zX!us}dvR!_q$R};$LjQC=hk1BN}mXkXh;8fuaNtnzf~uK#HUo{%-cizZ-5aDB)oAP zPx}`GN+W;^E+J5K4Ei?!&Vh_MNc6@+?w?Oqeezfn$TV+6{|zL_fPsw&B6g~Op1R}} zSh1aeI3lut1A|XXXxAH&68kr7cuk;Z=^HqR44_2+8`!`A1C53f!%}~PNyiR|Bvnl> zjRXG$hYmXgFfd=0yZJY+kn1@BQ&(SzsuuZgpz&$oU|PZcU%0B|0V`T+B{}o*zk&Bp z9`paB=`dAIx&6OfE|h>kyV*H#^#lQ^Bv#Cxz%}6VI#qt-rHuJAk?I0qIVS~rj(>KJ zhgdz7CUC{d3?k$D|$)nh(eGU!{v8H6x&) zsIohL`c%t2T&AS0tE-zH0sHLP&irWrjAX|duz;kDs)?_pHAEa79FoBK%}%ZN$99g4 
zFp3Lcex#;TE$!H!`&wl&K}-L>LH-ThNt0A8WubDB;(F_Qqk%XC@NPn?rj`Joa+A|w zHj~EG)bvy-3P0K_9upYo1~ehb!eAm3$@};3kCwkd8x5ymk7n@s4neFB#P7zYBx+zC z)6uAw;u$WFi`c9GJ;6 zt-#7kX5dcXxo;aes5Zvb|lzxYmb46b;r`f3+)kM=@L+PU9jvQJmGNFRM{B<+b_p z=4QcKDn@cY(%*eiLZz3xR~S@zqf_mBEG&3O(bC!){l^=1sfX8_jR`x1#cvROz`RLH znhI)jy_D;<-rkug*{NQQr&Ak}QLqq!QkC^$qg3T>I9^~srbG9{1kvG-J$5Akq+uWv zG5T+xLm=R$19y`9%Ge*=L4xS=M2VVul?8h-r$z94f9WjuOONTA&rHI?!t_N1|eS`^m z@l%R>_0a;)K_A=Eq5tZb$C?l3>*Q?4qRn%to#)>cw~ZAB1+feU_h{-UAqSr@tzAz@ z3fMxp(O!P3k8788@F{lM?W%A%R}(~`#w9qrTPy%4cjUn|#2G5Klmchj@gAX1Lk^|? zCxlfIa!FDn?!i*KHCJR2xGl;j_h)3L!GaLw#f>SM$wrucIeT_)W&(SbM`Omb`*C#!RTndS$VHG^hjW5Z0K4In+aDsZnD zyL~lF;Mk{^Ucl3ST*;EB_~(EnOThs7WVC7fe{TNy(_M86WlR4%BuX6&NR_@C_4qGT z;5|i#0AJzwe}MqR>MZ>jb%~67%74*?4P*&j&*A)O{~4K}0SGtbRS~?mZ~sLo6G<=; zcqoV}{{oVCm$X-almU_RMe2X4&r>GCMu??U^l7;RQZ|+2ZP^3Zjb_u8FKJavgU)xS zQj4V~W&WKnQ_L%%-*lyV{(B;$wlDxSFMFia)p7CJE&SGcBVTcG;WB8|cY#gK@YNnf z)*Q4T@Gs`O7MKQ^Cu>s_r;TutOXEVHsW$Ee;Ns}8{c-hpt@rAc7%K6hwbkDs;a>PY$gvRRLzs}uf0s$j&(CkWlhxiX&=XD&2==aFU)0N^)6>xUw*LV9H4+Y^ zXgEImkAk-Zmh%k?RFRxdHp6rIJKQAQ#Xw75?kch!{!sH5v+=#cI-NZHRJrzT9Ig73 z+kO6&_4x_~p<-K1mKKdf^gZuqYQOVA#HJ;6K3R8qxZ}P!T#78!sOgak2VtpXP*fxk z#h;$w9Kibue)A9iC%A97-;xW%XP0J|rBx~RtN%E+y3!dKeceLp9f{6mw+_#7I2*Zq zyyfNc$}7*@%WVQ}%#}|##Uu1W+r^_4aT8cg5(0>UCah=$s<|p9s>Lkw^_Ch=uCBSj zffhqXzxix6v6TlRR+XyI85xDDWhrtg`3pg(RYAGAx%ov_syB-b4g}~Mngo;mJuf2g>`Df?4bFPV+uGWA zX1u+dPo2fCanq<&zbF;Cv{pJgYI?j{Fm<-t$Pd7z1(df9ZnI_<=vrtbz zRZU2KI2*&`V9{3^c`>-4?K!J+i>E_H40Z47=sTG0?6oA*DmO=z6d5xxQqypwu&pxm z#A?K%0q25Ix~aD91B*1S5yWEHv%LN5i+a6f@{SSh8zbPq8v&POSb76CRIz8sP)nckO*jFke52h4Zl*pZf*B^-QV~j7%6D@MDZNve z<$mPl`4`jo)3|bP+PjO5Lu*4%Q^4*Cdvbwm-&hJPR-0eW`FA6z8YBmG?r<)-<&MY! zWCCkI*dt^D2R6kCUrL4i!9Een^71rm<5 zEhXW1xs{dGj_Qiz{%p}RU#`LsZ&De?>eACW>-KIbA`QkEA_1_Uy@wLq$|ZkrYAoUK z)#U>_-Pw}^%tc*n*=hgW7@L$YVhZU^P>8x3?q6bUdY4WK@G)UAG-IMA#y z875`JJlSYB9VI^7okF!ldSlKSBZKg3Q;1Af8amOAs*A0Jx2lg&YEMB#9j2%+KPaKbFdZ>x!xEN?6-28ZS=Z z@lMHs_bqk$G$R6Ts@K)NB(o6l2R+N`(ny%qK)_gBt)wlN^}Y1KfEFdIXamRuSd@T0TWC^q|aJ@4}moT3@p>>4r}L%ZJ3G=AXZIG7I%+y0d; zmfIAyKOz-F&hmMpC=&sp&$LlN+%j@R!rj(kFULTt?65+pFsxgj1IKX|cz z^x_dYcIjJk(k!Ilq0{A!>yptMzF|0GSQ43{9$S_l89hd-QTm*19&_~sg4&+I#o;ur zN{!3;PEkbpT2Hu=sRV$IHWzyu&j~o5_ePRTy&0Y=3@6C0UU0o2YcRtkRYGr__1TH& zX}gEBxjfKr<)tJ?fvV7Hhl~*yFf`f9(B!jZwf&*TB-K%kU_;Ye-Fp~YN5ZTJA(zgR z9}qrumFgE5STZVebqNOC?X(dTj+!pDVXzOa8_eNMxi%PcZ$^)@I68w$`OZTXyKN2z z^!>12FH5XGB0^p=O5CVwyg|B%grR=|!7?P@-i_-L>TY!D%=p`{_o1_UNZp)mt2|=Z z60?7nN*Kp>x+T|Sf3IYJHVj_5{`Y<874cq`gr|jhY z$$xmCT*;^{OR_3Pr>G8obmu7|7PS}1X@o|8v8WZPmp>mV*FapN<-@MfhSO%ZPWQTh zLjn#L=S=@>A|x2W&*gGPEqCvV?~jD7{8<5SPJtV^8+G1CxKD>FLJkoZgSx03qb<>Q zJGw_Ub=`L}5<@<)@lP0p3-Tt(EiGCl5gr*?<=qR9<5ho+%NnQSRcc*wrac^5xa;eA zw~CT90cRJ!w8Z+(n9NO~k)7-$9B+qRPEw_M!Yh}(%OpDfZ@oh4s+xZqHZk>kx1lhv@8Vw7G!A_3xO7m&Cj$}{z zYKCVrTpo$$s^-_`>V4-QqIYw{UZ^l}mzXQt-%GgL=5$QfbhZQ6{!-$IQQJyIY~s%S z^7)#nviJjUBdOd>PaJ@LxvpQ!cLtNrs~G{$as?ng`Ga~*J{LyRsfYOUgFt;~m$lit z50de;*j?`e3^HKE!&09=&c5<`B%p}aqhV{kYrIN z`UT=^X&{)KQd9bK7cyJvb?n%_3dIm(4BXeBxDrZlkcEujK)HWj$zOG4b39j+#TR(7 zHF{S!Tk@R33GaqaPOS|+!=h1<8`alvJd#c0L4H`GhC6J` zhl(RoJMu&}VhRs>v9z*zsY05$3o!QX6kab@a*;CtE-{pE@q1IQxIk!o4=`yk5+9pq zz3FHMOZm!&l1w@^#(gcFRBrD~51oLH0e5~}3eB4UwSx$P<}YjDdizp2D^%s~bUZkX zp&Sz)pxR~zprqBWB%N&_!B+eJPK#5N(wq^%cF4dXf3H*t@8LCXtmH)k>h@2N(qDb7 zqYJJ+O!4R9Q(~|<9mHU(x7txGwz!ou5}fZZ;t|c&|19kO3~b2>q$}ja`n{i(Fkn?M z5mZd%$e-U|9S@MWhIC^#IPRi+Kb>NkMbSR1o~E-<#kOcw zC1#coW8-Fr$8nfpFjFC;Q2w5q@7@>G2JS8>Y?dYTu3nNl)L$A4`+5rmLiehDYd2tN z-7-ie6_?63pjb(5=l}7s1|yAusa$6^T&SkK^8~wnbt=;{Ay-<_kM=;sL2lU zcCw-im8fL7V&n9vbKvh(N*^2aoIm^QU{*RkmZ-ej>R1= 
zTBAXgpu+GlL9?#Adcy0iG{F-!kxa#}ede&7Hmj{yM+cvn5WUV#rfqVM(wA+ z`P>6A^b-k_o|u`L*?q~v;*jxd0^3&fFY_SDDJ+`%X|8A`yr=rS?A#4y5%hv|zQ z%@8Guph3$RI^h~nj96vLrIMy6P~JgokGhPgakEb^#R0G?8#3iao5^iY>E`BEY1oT$j;&JTkA#z>R-vO1>ug;BBb~??UZKg^ z%M>d^+90g9fuwc(&B+a!L*hl}%7D(Dqvp(*fj5~1ITw`-tno@uC08REHR-s`qG`dv zUba%mCwN;~gRB16+()a|Y`)q}*+6=7k+wY) z68^9y4(s0Jnk|JIP2*aNc|krc36Ei|;=8L(%h%TP@B$SYkqc~X@@pQo)(cSSvv*#W zGu0YKU!~(|{q>l-3lZIrxSDs*U-nOE7fzN}>$q5Auo@5e{oovp?|kVFarx;+Wu80N z?#w-V{i(*W8txTdEhfip;}??z-{O4w?BU@7B$U2lpPB(2G@GkuNa0i)>A#jmHyvQb zrtQ2HgIL7=fUqu4$cViq{`GM(*LL4R{eJgkv~;YYTTB35W6|mUhCPDlO#s-eMdC&o zyv|{BN&e%LMOsXE+t_|y7XrH$Znii-t%!rPLPkTXM%uP#tVvb1UB1-Tsedga6=;xS z^5w>C`FSHY;4ny6+AfdhUaCsbcaTG6jNt*eMz)KpZ$-ujOShazai#1RNeeDR$JcB{L= zrz~dOLUCoPaJtH6MK_Rm2*Bk=`;t)XloqXjX{=HBzDcYoM+5&w0It&{kVZ~v9b&SJ?ykO2HhbAJCwTs!Uh56 zE7q(-U8}k)@b`~wUyHcc=08REGXewQ_Tqo1flS?JZ?=jfn(2=FbhERf|I>XrcSA?T zH&NMSb~OQ}3?-A{Ee^fd}FUZ zQ#+Px1;-C2x}7<{0FZ&1A0(eWrWOh8<;p%1{Xe~MsndBJ6dmGX)d3%kd=1NMz1YIF z%^)KJfSm{c@GM4sXkE2~5+*bOsA>j^X8nN6P-ypXe{DG}f1iVuzDf%yRtZV7hKHAS zDIxL3=>%$+jv+dZ0ZtyA&<{jv5K@X}uU#K?KA>gr7`cc+=es=<>1U$V zpSJF;`W+1CGO824*Lb3Lj&B@l^_umt7q)Pr0hXif!fWXY>4?Z3-;-A=!omT66^CLm zkY!^kzT`)NmcYFA$tx&RUeV=Rts^Cq1{14$obZ&d!INk)qiI`v&XSqzIMP#=eql5A zxjl&twkPB5EX7;v>we=!ie&aTwUUYa(5zo%au4*_uowk;LXxBaz{LW*PMFWw9bL3; z6mT&CH8nL6)vKn8)kac)&)CiLJQ)5xmOJsW53niS`7(*m50219^#6xUc|h}B1DS-` zR5_8|=1_0~gI11iyNAW}@#w@$3uldLM5GhXLrFY#r2}2sLD-^S_yfUTUn4LaE@yX* zO2#herM89W4qdnhKHu!7wB4@WP;WjYUzEG>v34~U7i96!ah!u_N2eP5(1GULhE<*V zVdzQx~!@J$$Q19X6!ua{Ljv~%1Bb~w3A zGoiLy2 zkUK=(rcX4Ka%n-&kn!~Zp*h|BxkAT*NvFk&{$(FVOnqOXI50wH&j&XL4vB=6hnrod zeX^>i7lA)#&k4TkvaJZJH6 z>t#Hh$BS%`ydhWs!?^SEe|7XK~0!YBm9-c;+h^9%`L#DYWTT*H3$kTH33@H_LuB zzic{tpsmGe`6UfLVVisXs6SO9$s=M{9r@a8s9NW*{m_u?(kTMG&sg37ytYHj^P-jfy-b&02U+ z&{Z{6dyV%*%}kX`)1!!WO^GEWl1rD>m#S9@vpd!P>VhcNtSfemSv(&q!n9St7TdcO z4|bv(spyYSmqcaibeuEFJ8$*QSimWQ`?}4C`wpSeeoOqv0#ZjRl@}`=no7+3Ssz-4 z0RpjJs&^S3YEaw840v^T=lm`5+Jw4QvK3`oi(gkVSzS8WNmZ;Idfq1FuhO102HU*F zb7vp$_J60F_VbtR%#OhFZh_n63)(jo(qEs=()CQm?uJBHKXbA{XVQMJ4BaB=4HfM$ zh>m=^^$7o{FmSP6m_uK3J%Q&>;;BQ-9{6|4K&b8V8qRlNLA=zd_r`~t5tXlTT zl(p-Td}oHXy7RXo)i>$0U!kUQ`Zq0NS0BYS{AQ-VF$5#I(7+L*{BFdeQR+FOC@jkh zF7Iy7w$5BsZ>Xobb1jA2Bjm3=JIs{4ILnD?M_fh2SBsuWuFMi76+UiOAWPkm%3^w- zrhS(5f>s^EN}|3)?a=!Yff02yNAH`C%cz(VZ;w&%7tEiZ(aQ?KhH;3nGQg$Gld}!%A$^RH*Bv@Rkkm~H2*-X}g zGAd5Xx#r)&Igh3U*o0b;agG(}Z%i2vCa9$Ay1TpE9xn1ZNQs37HH4&DllA!fH71Hc z8wUo`=eq3#cVi5Sm{@mlf=@FP%4hpzNI#J#U%7UT-GqH$2(-@bKizQ|(i%+)n%H(l zvEn4ZwtAkyPY^b2Xy4H$=m67@A2?U$YZD5uY*PEc8lphv9$z&QUjb2`p)le}Ga+1Y zEB8e;+xDKvWqUYl6TfVH;_Q*eao(`m&);3*Q51HwDbSB{@qRqH-Xbd~Qbp^dL= zEl9UKdjC3iAJA~M|MV-q*~T`ZocqXEAxAssfZ6XLBj407L;;^1a`L^gIjvp^Yb|pUY?gfDg>FRu={TQu6nI=0HtupoOT@tI2 zW1eU*#*P*KO!OIuCq~r$K|u_Upv)`f`VR6;WOWaK3tGC z1q!^*H@! 
zsV_0=p1m4SE%ZyQL0ML56AzNlsithMhtB4VSRF<7+`+^B8I>N(+8i@{gn_-6z8u9O z&~>e;)g@1-&_pjsq#Y4N%U4%3{$c7B(vLh321_-!8fxHwj)7P}>aHr{GQsHL+pk}k zAdV}AKeJ^Fms>AQo5rV9+yb{evETMH1DDoLJ}ZcxBv`4GaHIe|x$H+q)z$4`y9x>H zzA?6~*Fvp%>?V4sTjESIKthGQ{Q1zVbL8TAG-3(T_2rMq+O8Mfse%-enoaZYt?u~U zGI3oMy(q6#ij|+GS{H})dYV3TTNBjlE%YE%DuuOh4CChx7x!oKvYAvGKnI_2H+*_1 z#fQDDNOwKH+QxfL{1c_lf>F&G-wCGC-l#L*bU;xKu-G#pD;q}_RT;nPu->=X5KF5S zLV32U4TKFsOlZ7dI{0QX$mImXCwN=0WR~#>l~Ieyc!ttpXFN(H zD9MfAXkR zF-hG zK;G1k)i(%98Gxhu#l{i(qRUy?1246 z5k#=u)T2==6oIfW7*$XfQ8dsNZRn%&xz?vR9X$f`62C5oVCcWzQB)MhqfEHKyTWpB zw;X%c{MhQVJiF9#neBWHG5rP(Y~Ho5r9gq5T=IZhP;*lWv>9HBP{Ypk59t5$!xRsL znHUnf{JN(kjXfFF8_+K${FJM`Lf2g0$FW~>c7ytl$_0=PDHl5Cq^4FDY*3maA6^(z z%Cf3|yA&AeG)ydoKdf14RUpe(G=}7%7ZmK)`OT(&{XNeO19HX%Br+zo`}3G8yPVyT zNH{_U2VLa5sudlIqb$_>buZ*Ze!cRyYm*zzIpFfV8J7shC{lCe?>~4Qu7IJWH3W!i zW+K2PCq!KA@Hd1Jy%aBZF}% zsCP4t-Hoii`m|hu#> z0go_)`~gyCF~=5$dF9~4Rmd%*koK|e^ecyJ#ahK2Xir(A3y}m$2Iy_zi#i1z^gJ_8#tO>9C&Fk4%Z}x>~QdgA5 z0elpv0xZNK1$3%W?QyKFH=yo~{Zy9r_bpz?2NI9M2v3Z6xgc%XdFg_|v^L1p-sJ@e zO*$uP%;A6H?bYQjWw-{H`peTp^6oJ{9D|+Rc$&^PTF&3xcOZX-N9=fFrf_ed(HTpg zETZWxlkZ+ODznv@Wa~{SpAd>w8mm1D>jtgY-ZkQ@w*`B+9FEoa6w~rc6Y3n&T@$~% z#QO)MdTdJLe`o0PfD1E5&pItJ4xchLrnp&J+F*6MjC#q?3!?~;a)&rNg` zP2!b;rRP#D@+}Inct1w>Sh+bg`$<_g7t7?{9j)yrL@YZ0+ww-VML`7f%sQB(ainP$ zqJFD`Z|9*~=Wb#kUuezscZ%4l`1^4PWV3zW#(II4)Ze?KWoK%}A^S5{y*8JT_RvD%14`QZS_fGo0D z_ZR2_@K4jYWmu%`+LrjsXref9j?NWZ?oSEMNLUGXC+~{;6_$*XZTsJy{%F%8fbrml zrJf?Ltb;zt@b3&*Db#3->kq>q5_1B=i6}jw4~fOnpO+*WuBGVysDGP^8;-H?rEFIYYUJZW6=wVnhtSQ?Of-j* zs>8{9lZ4a#m_qEtZBSEZl>+yrxJgouG+VdXV&gWdvQsUrl(>ob z&*1B*?Z}ZN_9MExHaq9>)fOPit7HYWu&Qdv@Y!@DJD#1#?&0CL4tDYRMB!%T>@vJ5 zpW0%*IJ~XfQ-h1_ab6Xc8<@F$eG$U$!FGhL#sQ@+&VRA4DG@PhsN z-u>rmcqNl;qwTIA2vaI_YTp$$DEMhBblX)Up?~n}_S@vk0v4n4+0!=%C>W7QcKE>% z^EyGW$of3nho$}G(r(zZh+g;;S_siqZ+ItS@%VEI|;a_yDjY3U;81B_C5nHKQH7L&(cqhFyscp<|-4 zjIm!Mo9uu(UVTz4hV<$rm>b<_rRlJfp@iBg7GIbb#ZcPHQIqQhb*xeG!%JpN$S0({ zC|nPQBvc7N-iaLhwMAxWppYd?kfpbOsa5#y9+IFP$80IvJttxpBOd1vz~#j!6mY@+>G30h0?afn0=ZO-=t6;3 zyMmz~PI+KdOqqA3JXgjwQN7xQ{{2Ji7$a;`b91igRK-}Uy@Z2IlPLb?F&xXP2_*u# z3s`BXGZIKBpwz&Ok}R33c>jF}MMdG@#!k7R^_hLsWasd%8sMrM&FG>Qcnk|v)UiD3;D`tp()19nXN6BW2a? 
z&kDu^tmc6R_zweeo55R-yPw!$^|FFD-y7U->vcTJ-C!XOV_mi_zIV#l*uQ%=Qzrth zniTRQAed^+kAMh4FjDZsQe)+2FGZ(`^4rFYicx=zm_5NYzvHxE!$$%nzh`0v=8W?k zksX}Aa3{|Rks&(RaHA7kKf&THcFqTvsf%*G3&b#y{E zhZQbES8Y-iVWQaV%SoW>Jwx-B5(FW?l0GM3l|00w{G%28*9c-jP71eAVQUN7rgSIk zp%fNOH3ex*^Q`0+YH3QG)tFn{rxGt_N$oLYVOPyi4;>DRp9fo;t+l6z!v5&eyY+dk z2|Vsos!5I`3)X~w3h|y>U5zBtqOp^nhkD>{5Qz^7u1>7bFOx&rJd^Ifd z9rNPDBs6F}5Cm-nXY$gm06D1Xx+Kf~>0N<_;W9LUK#b+j^!lrzf(u6D0d%CKP1$cF z;DB)X-5TUBKyZ?8uD83P?_P(o0#vBu!VOuSgh^bfCaO7`Y47j;45#ap{BQN6kC!BF z_Nvg;@lcwwaJ<$~N)=FVzd&e&yku5m2ZENx<(el0BlR8yj%eLJxT-}6Dpw*>)sr2S zhzILQaR!<>&Ib5A9|(b%LU^go-S)cLZg%g{7&Hx-{VG5PM0$bLYI89Be2X(C#eiyu zH9yU=z%|dJT!T;I{e}Kb1QNAU5k3?&^ton2I$OlUz~quy*VrSsQDfkr&YK=m-~n;5 zFrA6e(b3iaU&>6)HX)4ci8D(~Oq9d5bG`u8QXsX}j{Did4(n~@4)-!E0>k!v5W(y8 z@#{`Oe?(9@-7%Mx<52P&-HI7;WPAITWOW%yt%~S9Eo z3kFS6E-v*lrW42w13>f_g3b`fm~L&Ec2PniqPOME^cBYa8JI6?G9ge#ZuS?reCA!n zSBJ)+U7VM~@B?)((nM{VGrhw67q^!9kiOKljcp^83 zk}>}&i&L*&%XaB~vXSvdGcL7p5U{bGx?Ny*riJw1h3w}&+MJN_mBkrK4_|FB|JK!C zhGw=F6m-Pgz&)+E6Nm77x<1~nF!Nlt&BC)FEgc!xCgI{`ZX3Ct{!`s`+ABG?>H1{D zH?%rk*+dd_t{`&n=$Js*Z44N{b^rq5NX2}tEg9)j1f#wv$a?({)L>IFQhF@v1&qQn z%SUWzV-HMHdwneWv&Wez>axCrlnia2oZBfPoAVu&iKC%ZPA2F)v6U_$WAtvotjx&d zwl)2RZy+VuRx2Gx4GQ^_MS<1Be?a7)l@OGR5TGsd_fP-B5s@z_SXt2ffY`RVw$^yYwraeI?iu1LIPe*O7O}u1?Dx(1Ha4j_+56Pg!k9-#hUI;>wTdbr#QkRH7p5| zFjG+1Cs1<*kg4$yA08@})b7t-ROUfj9%Kk5Mi7Nf5bi#CdT({~WyckD{WTGCJzl6c zpZA4#D_lu`4;FQtEkSC%X;G0oB&xe2VD~vFXCQdagVZbTt1|c^SGaB#-pK|MHSYy2Kg0s{qz{;4&uV)7;Z$yXlC)EMCy zA8t{NRQ>;000Iy&O9}rpoeR$B07^jg-+6>yiBp1R?yeG735VVjyK(jw&x1tgKQ5JV zzE>&`3)XxaP4ijEs80p{gQu=()>&YlfmrBrCUmpS`8>^Coe>A|;>GtA3Zy8GkF%)$ zPCyq8)K{WU>MNLx;TSJ8r}G_b%UI(e;((Yf`^QQh4<{$ClNSuInOLxK6nGRIQUgO4 z$yBDC+3uJ8PS1Sl%qJ4ymzjK`sDxln;WKBt0I}CvPDIv3 zVl%5Z`m3fxNsyU(8=u801jxcMuyuk91P>i>c@0Wkj`VY&2T(v*Rp$j}`a-~O&x-;x zN-4QSr3~tecE-&Ds3J$0J_s)9@SUH$C|8FbC%pPbCDtr@APp}Q0|m62lX4pXzk(CM zJZVe1m=&ZxP*$FOai+c(gS~Pg`abu4*b6Ip<&!OvCDTDij=#W5&|TrO;CU~H5SO5Q zjDUayd64$ZidaPsPBp>rMERMbvBF5F6cQ5?Q|nd|u=%HEMxY*Rj6^%Hae)GQ#Gdm( zoQY19$N45CAiod^y{H?@T6}FZYQ^Q|<{0_<>accoy~(4YdPB`NV^|H@Da$W*i5Zbv zkpvx;G223A!-oIQq^aXR-7$xr@)Qq3Lt|sMLaunBS_Sdl{k=fb$Im~f$IO0eA_Pos z;25LYtf9&qJ=+L;W+S&60@}_zCAYBu+%r5-AgT8WD!kpLv|0HM>o)O$RIy+%=u>ia z&ZoL2_1oUeF`mlBvnJSPVRnab3?ZYfEPOij>~BuD7iA72hKFF=mBs)VRYIuhI1mF* z_{LH8n58Ia;xpl~)tNcWh+EjNMxLxw&e$JCu1vn#GxqCT-=%9`Ck0~Rt)%6X0zJ0r z-%>rfUeA*1@3FU+&2l~4FU%zQF7mYW59n^3~^=L+ZnzLuRF`x7B!+bF?STT_toGBUCHrj%)pBkP;~|} zaG0O@GJ!xV3e_J&WBc~+B4s}X;olt!`|qf1&S%-5OSz(O0qH{a6p3j0nxo>Gk~DJAqAq>vw4n^%#e2UXQNl0|Lch zoXE2099o)qGEr4c;BfO7n~Mhm%$_hmTqc&_p{BmjKb~^(sp(~sIb=_y7N(p=TxuIp zrLwn?@b1e}@2-CD3g-r;s?%Jq`V6Ae>E_$oQ4cHemG2=hs|68gG8s9UcZr#eIFA-S zW(k`v-_F`N*o%SM=y~yQ)JtfKsWPNFDm8tQBSMY~6dPw3*#6iinlatoop{qf9b@Wj zz~QN%{;szE&8o+8963h0|qoK>=T~5%&!+s+wNmSwKtP&o32)@UuDjCX(FdCZ|0v;YPkGV z@-?ZbUnOPDKApWD8=sOm_~Z_3kA7ns!%0( zXzf1Xm0L$F(A=_r8clYe8dG?B<69Ns^Q7Yu?YM{!c6(73AhiYj$izbN!c~q}DfH}( zGgod7(gIvh)?N51Ej~>$8<*I8%CUTV=rxkX!|WyzZ5}e!c}F0CDCJ1^KsykDPoxlr^!41A z*?8WJk0cg@LYj1ajD0zImHdO?`_VFy&e@6Q>&(GVD7-WlB3-x4{{2kJBj)d=uc#M$ zbCGMYNF8b#FA#>6pUZtk>0DWLeExe5DV|n^6~6>%h4g;*WhXH=!doIvcw0vd*myV* z%c(hL36<^q9&1h2(E{kV`*-xKqkX3II;zrlXgVB&)s=y6*XxgnzfMCW1{* zm!w=2oX9E;DP)P>KnXCt&*Id|Df;j`3ljI(L8(EG-fKJ*pLAx2QIg{&Cv6Lt zSEp-zXMewxQruIbPOf+UwB_&7df&lGL=q#jFb1Fyg#cN8Hc-L1APfAT3EIg<-yM*0 zW`kC6Q22y;Y83*Cj8)J&5Pfai7{!5v&E)TY%eZP>LxRR~NO9oF0w3$~039E9`axI( z`S!EqO&s1pnAoPJ+3e@%Ddz+wI`1B=IE01cNG--BQ&G}?I4?^QI#Q!7jg%bgPBO9B zjj+Zs>LOIj^hE_7zt+*Pb8(s?p!;0>a_nuFv$ACC^79SVSzKD1e?gdm_Nuu&z(( 
z(3RGxeFGwWbjM!6TeIwnyfvO9%ai@!G+YttANnHKv%QVvBUlR(;-kI_xXjQ&e+%1{hEZvigKUW=IDtq`9GaFXI7STv;D z0UpPoSrasy&a}BV&jLx<0%^@NAoH(r5it8f>Of-#K>v<==nIDNlM~2 zQk28l5Vz}-ZCrmKeoNg+&Qo;QpkM#WB@?$Ce2SB>nm6A=IlDn)mO|4fbE#k~>xxuC!-d+uq1W7KC{NAxol+;@ z2@n{psYv^yacQfvPn4G~@JOqPUsdWU+2HJ$(}$w|cQ35l-VrzQ^BGpU8Z3O!FM=>` zZ~7Wxpqc)$ufH+#Rdt?GuH{5`37lYv>`f%?@!M*X(V|2~l!|P*F#_J~2q|Qq?*($H z#1)xza~?{#M@S0wRDrP9C2UeI$gF=z0!h*s;GbS89Ff(-7&(fDg@VlUB|ZIf`^_Pt z<}yG zBBXF9E57Jt4EZ&@pF8Eh=%1tNNDe>LwJR%iqik%Gu&x)S-M&%LBA*e^s^FKZToS-J z$7feX=J170lKj&Yn1!(%h9Ceh{*oCyJz%^-mjv`>?XX7d{Xn1jYEAMaIhy+#pX~P- zN`GrcVxm@Oq2eGBaaGj!Q~Zlux-ymK+v|oFSGnY%_K>tw_6^ef*Mld{&?5=f))a8O#abZ(b&UIVtDV&!d*w>64A~gd-h>ZKSY+9k zP!=lWy411=?{J!XYe!n5k6q&!H?2KJ6b;ZLd2IV=nnP8YreZuF_Ug-a6R1!R_ZMJN zxmSDtJ|;_PYXo<^1_?Y>BIgpuUG^-B-0VB(%vEP}IRKv2RBe~nMc zLKb!@i%)qRk%k=OMGTUJ-!Jur4X7$V{4b`yIxLH>>lYB|?(XjH?v^g;P6Y|+?(Xgm zNdW~F=~B9p?(WXBdEf7x^Ig~T{2}5!bI;7)v(~THEDJQYL&3)sI6P_*dDTLhe5_pU zoR5}-T>nkPS)GJGt=lgDY9`M~LG~~54HT48(KwLdrEF#6b#(~xtpRT!jcs=7tMOjU z4ET7d8vLP{Dqh6J#WQ|8v2*PF4UZXO)Z4$T&@3l^=Jz}={*}Pov7YcS`A&4LL|vq; z{=BxfwjGTu158b)bNJzYt5V>mIDBQNWh1o8AIDm+ebhKhp%;B%)&Lh zNy&LY$2~ug4(U{d{eHe@)aTZ&y+bC^Yb+jKjl^r%gCcN0jCJrl-`H)~fXT1aZJ;=G zNO>W6j}#KWBJkE-p@NHl_P;0}m5C2CLo$YK4cXWt=71In z95T$aQ57B<=9&%c?Jpk~Z|^1I(}$ar{`8|CUH$M>#IBo;$Ww&hIw;LW8`d5t*V(7YRW1(M{>Jt;``c6LH7Y0f^j6ddHQbsvz(E1;k&1r#cZz+uwNZwkcbQrY z#;>sdi6hG{WWxiUF_C56H*?uNR><_oJ%fh#ZV;YHN~SUMan2 zb~5)J-eUFMc)^xC_v+tMs-dh-rg`tI`s~Vj zs1TIr5}eFpbQ$G1(!&EnKH*OT92UTrLZ~}Rb3(R8XG9YNq|^-gZoDsvbx};E2_Q?~ zL)MXWY~R*7dAq=dvl60s%!u*pI^BPNz~gSiGx(I+_mAuCn%@g)i0F^t=HGQPfFKf4 zdxP^|<6i{o>uV;M;~yJBV4iSWu~&1I?r&SfnJ+ISB={4>4}TLY46AWoeW#9uPhK}t z;Vm`Jm%C~PYABYBhIR}?sf-|&Cfz3A6z0FHstfo%lh_vL?U>-@gp$>|!z=4XDOy1E%E2_o`;a`-vpi1?FE5!Byc$Wgs+S4n{_n;(V(ef|9W(iY$Df9Qk;ICLVr zxeVelq!umY9}3VMH1TX(1V&o1y82<@DVTuND#2*4yJNqr@<#yj_ zfQp!cw?IO|{X!$yyeFKcA38epn$7h$ zvI7A0AU6uyIV_jUR=vb--F&jaTsDQ(m;}5Ly8&B3ap7TV05u*P7!xgp`vJJ2g@lEj zj~2K$c>XScbJ2uaWciB<1)qT)2>h+oAm{A@Cp{&UiOy1VlLyjMl39iL zMP+Nhg7;P>6tF+oIq|_`ee-|uD*<%{M?5Y&CR!gbnfHQII0Mz_K$qF7ycaOjB!DQ8 zsaiuq3>}&_b~^PExDU%eD@FW)W#LsN*vMi89tFI3;4_KoPvWNj1jW(4wzXC2*I-+W zCOZiA7Qtk-Bhc;~EM}+MQ&SL+g$M$VCRn0gZ4+ux0}%_IeJ^ssup7f z;&vT;dNOQOK)A0~9N*T~MlfWh&jdfT!^Cpn*2WNMPkZV&%}|iJ(uax#bYs2LepMJmAiiSyh5gp@tC(;pbZQb;_~TfqTLIv^+kd#r6NBbs`46_+AqxaTcW`7@ z$zlfZ!14&=eSO$@Fk7d!GYH;VyLV|l&B!7zchD9F&yN@H2E=_aaJRv$h6_)WlrorP^i*12dXOc+`=(IzCsT``=$p&7e%v_`Eq# zTmH&bpwyj}qW*h**pxu*OIRygDs+s>Du^`zZhocp z@Yjge!^mJoZ=r>xCTWzyKLzII~UxCFqMunXYk+i`|xMjSL>H!541=9PKr&!n0uVx1$j;~DRc3dX( zC=<46`R%e#+--Ph!AUA{h7LI;L)i=81!?Uq*>^K>AJN~FJ6xFN^VK?}g<2QJW=!VD zndf>#nVjVWt-6z>{GIykNZiIaS+0uS>7zf3izyEe51EwRh<%ZtkjLmv$a&8xIrl%c zJmyY3zbAPSir*wW17*iN# zUE&FweQC?4-FI34qE~x~%Xj*qHs$OPKVjOeHrFDg5IQYE9DaRk{l_zf!AKt&tL(|) zgyh%R?ja?YlluNFheAnAaV+H!U#cYazbcy|JXSL@+Y^u-;bB@Y!ZxesNEq+TA++BgW-WL@3XDKk@gq zsbhu0;N0z(Rs`NtON*f_Vn8$XmYi5g24NFm5YU8Gunn)n4`g{pZ`UMF4HF|D@b(QR zm#;6<-i>l^{LoVvxcfVh%Sejz+=K@*CSVSJ0iSIn{R`xSD`}9ZpGd?YrN63588gc0 zM`7f&7qvT}$Y@8D_%%@|W}~r*^d&e)yt%<)gtWq-p{gxg_Mua)Tb6vY0a;;wHXztc z)h7B^>>C9>&A<)D&q#;7bQ}kONt$c76dV%N3~KyH$058gEz_?w5dFBj^5b_Fwe`i~ zcUR&q!)O_1yVK>7KAbDV_Kpm-;>6)WV*aGvkp(;-x~!hYLC!AL9vJKV2WgFU-%lB4 zbB5=h(G!-R`gBJoIFX43Y%1PybW>Zu*eHTV5(M`}52Qbm7A6E#!Vi41X&sto_ZX@B zZeRr`vCUu|$ekBI=uaeGxL!JC&R4V#ngRy&<@eB#nO1Qsna{UX-u-tmMuLzFy=BBIora&%{}rW}GbpQnS?={=NM^!B z5%t5}xEOsSxpur?!t|BDUw0&lvsloT%`U&k1E~C8RS7tm)W#^_{jUD4@{hKgr#!S7 z5J+crpk8cmv^(mDpa?=>*^;R!0=+)kvbV5LbW6`FdhwNJfoV~A2>iB+*z+ogztVY< zrW5~DG*ppjzKWNJUW-CfuhkxsOs9yWvZlu_k!cyha1Q&!Vg6Hw;Air_*zA&=$A1iV 
z=vPKDS+lH9YsVRc*Ddm^HajSvAAIZhk&dad+>;}tCuEIrEQx=+Udg={Z?(4YQMD-7d0OR2Lp;pB*OuvsBlG|h(en4 z_#z#ve$=q|EWBaSYo65`bH{`?Z5cMwAzC||9>21vHfgq!aU|Y zg)+$5#MFKKV|P1EVq2fNDzIYm)Fyg){+&@46&~|%ZU?9ZQVj)0&u&&^r*omt4^-c8 zh8WBKBU+Fj`XzY0BwCY9!@GWSwm)qT{Z@cN?#zPEb+^C>Up|2~K(!Rfm4?t^^)SI; zIQev&pw;_^B`4*&z=vHHTQBI$&K73~f~->vZ4-ewc>RJpw zr)IgTGQQpd`iAuT_WOOt7(8}~*9e-L-n;498~;PYw;ousgnkrk z3oD(X*DxCAGu`p=F;>(M#l>gO@Ru%IXo}DMKAt?^1O)}pkC%>l^Zi=*4*6HpM@inu zD^=*4M`x(CSV+41uSBso47`6bNA)^hMopprv%|J?}xTRwNZcn}p!ycPcLR_Qglu>n%)% zgXmA%pTUozmx{Qu=CcT>$ATNo@LK8glHAX`vIWN9RGj{aD9I(bWunIQxE+wW?$33as7A4~2S{P- z3P1tbLw_n``grqa#qZWl(hn@i&TC!#SoCI#x+&FYQ7vFMCBWd7B7MWb29TNFONGgYskir9TW48r)d@7ROh4U4qTZ>@o(&!eiL@GZWi=NWyfYY?J`4)I za1yj(J-*8i`FLvG83(;Cz3OOyxDezM`l$$X2;@&}et$Z|-#E{2M_3q0sMs7qM{}k- z#ub4GA9B`ozc=@5UL9HyUy+Q5C=1rqNMnPpMOG268j1h;cY-E$x zC|5~!wxxSp=r{y;AJv*HHYv>#v zR-iN7!}mnwS313*Us8?cWuEm7-g4B3+SWFeo-YRI0sggT8v=&N; z>#M+ff};b^A6JQH@T8obyW;xF&X^nxAMVuqyW^yng8?6LEJnLn)RwZU@GVqY3~|6k zc^qM?7b9WvfS6HpAsSl{gnvTHvJ)bz+MJB$sXyS2y?p%C3V4M8j<6nt5(VFQd zYYF*isMmhWurir5+w}=>r$1-;S&!ttjbFTL*x8^@i8dT7IXEpBd-vt33H2(Q2TDsb zre(g?nNuk;RTV~QGKNwpzwYs8{pB9DLxo9~lS*A;BfLSq?Xdh*#oaWTUR{(lX;S8Q z_oFQF32}v0uC)zr%+1g8|8$e~h%ykvuc;r8sT9+cG_-sfpo@qgtKb>of6#nbi!`a$ zA?+qY}K{NZ*IrSs8fef8`q__9`kH6btl;Ho7 zf0dFSXj%G~(Bxxw_;o2X^km!guAkZ?`HWfs$ss;khb4&7-m$3dU~X^2=`TJmqUeXt z-qCMjG$B&W<1W8v`5yO#w%h+j%={8C8M76P_4@tXVJCg)!f3-oDzZZgMM$%=|l z`+PY(9FH|RNQ&}%}PQgks{Pgq3eNK^U4e<@bqcj+`*&D zJTyRpj0yTXL19+ddyhz&8lcQho2C~hr<4k#<0=XtseSV95HtRRgYb}L{@!x)A4I2A ziApBHR#kd;3W+~{jQLQ^Ud2rb3rwC!*a!KGjwSxfpS<6MS4@U3qASJ=NES6p#dhX3 z>f1aN_~ycnGn41$A+a_3x??H@*==@pvUA3^OGfCSv7olY1I(2Ep5#@a(?b_wLlUfn zrGGc8(dhcygvZj z-Wm<@J5K1UN;FkCT{Ngd)+EJbDwBwzc$R zrl8oooRDz+XeExVdkH>9MOo9{_NaL9=6&>j5q*68GkUIW^v!I^t7lk;h#N(ur9O~}UzFM?f za&*<0K~&Fw(Dc8s>HHB zS^i{BLb^?D79?PmUG~pGAH7wR(zjBrn#Zs5AM`)>ONqRnw%s2Lrd8qSp#MTa_GV=C zI;sZ0{)r+A!rW!^Z>GkFzFJFyKzKP1lsj%P^fJAx4fy02Rd!k4)^QLSBGY?*R>T zzQJFuS&LjNz3B8DxCGqLIyb-YmB;n2uyF^Hl0cRY-L3tGLVySjxS|vr%@gsxFVd=> z!4rLcSMqLBajt|)dOJTULP+6JDK5V!jmp+w@!^E}!;)9nd*1x@uDkvj)VQodiH^T$ zsxgt7OK99guZw9PfXIh)4I6A8r)Zfj+dzp5WcC6mhd%;azrNeJyGRFFD7i=vdHgQY z7Nav$hKwf^9BzW8-aG-&vyz^uk;IU`mve@itxD|mj z!;=+ztIHV|J)OoO{1fC9<&fgTynFY~0${2N+k`|!NLC)y8m~<-k*FV0BnEYoZ^H^m zD4_6vemwU8P}-{k1VlndfMkHz83_ZRAxMc)uop+Nk=8(ELwjr zq_g4C>3VSfZ7g4rg&L~GU}1ak{yz}G`T;tnhpG)dzVI11(OwZ|EXHfNrEHGFP;xkJq0a4+1tJAiY^M~!nC z%y?j@ZwxP3(0IMIV&LkULRc`DgKHYYZ9h{0P(u%(r+f`-1+cd{hz?axVh8lLTs}i) zJQ4q=0e1jA2Z7lRVV9jQ08Z@!r7rc?o{fCJdw7t0O;M?j1LF6oOKKO?;4&mS`ukxY zk5%7Z++A2}Ok414acd2w_Y8>Cu-Ba16CDu5Bu*2lvV3JW18VUst`_9|o#yTW1>=(F zHtXrc*F~2KN7qNiFwR}vg3F{E0F-aVAZlOzlepS7fVRhL4Fyvev=G2eLAkM^_r2Rn zH{Sds<}wb_BE~BsKT^J}^~HKz;EnSxOYkf6?oO=HrdFlGSkbBbfN|4bxAsk?<0{ZYZN1_-j; z#1V9NNXGQL*hap;*$=G&dFm?bRJ(~}(ex3oR^PR{nD;&2 zo&V5nmavVXdw`FSn)yG-)3x+{}Yu)Gi_kZ)Os2(r#$F+>_Gj{?fM7;IcFG zVkd#a$c(D_vm{y2S=R|>o}Wi()E0xu-w@HY3sWl{XEC?S3y! zj8<4k?BDt-Cr##y!|CWUwzzB;$EjwmdsA%QlwM;)S;YjzvcVwoY@AgVN@GBgiugce zCIe5-K)4998hvX4J5#JNFnflNX;WRpz=MlXb_za>adleyDr4 zTya%uMqlF37xu>aB#i&Xv^%U2L@$tAjyFgO!YBk6WmEiE0UE8ff?Jjx0e6CQRwIcC z3wgDAIdUv!HLf0wC>O37R{T+BLf(zPk(1v)9WgALwUXzXKp>{xnC&jP|{&l3W6IzLd(BieAx-D{c za|^AhVFnBYMXVAxOx4N)d?tzG1G<7J9de{nkFco_$YjNR)^9_j*CZE|IOM_4koyGEF-Auc=cbIsCty z`K#OF*!Af66|%VKLDo7Rh9pfR&ERyuu|IRq^U^~p0<(Fa5}ign)f)L<7fM z?)1S?i@3-H()rV|8+?5(yFk(M7}@L=$IVvyWSWh>GnZRtPyB>CxM&Q49tlyk&$mfi zr{~uaV;9yoX-CnYb#!M51dR>mbcp@ak%GUq_r%_S%zYY3=V>R_OaGyIz? 
zsG~r?*IP!;5eP`|bQpj-G_rL$Q;r0enwm?3T0?>+ zr06ROv4Gr$#)1cUQ}vi^VK0-9?WXdO0i9D4%kL&eK)%AAY-#Jo|CNijVs>6ze@pcV z69fJ?YQ0N+CrGv(uXmO#cG*@8Wq7l&Bjovrh0TDF9kt$DYB4bP_EM!%uLkE)x?tiK zl0LDE1vO8|H?FdO$7FxgcCN{-c*HkD(3&OgkU~S9_?oBB|GuSOdT{RT>h@NY0Jq)7 zSVMQ?!P&-h&yvfxalQ}OYLcHrYlT~;R0hX;S_tkh4`YEaSB~-^u{+E>!aLN7ojR+< z48BsKI)o(SyA3+d4r@XA<@glBi*o1SwZ``?F{0$e+n-ISbyLgmCX?Sel@>QX7scv9;!8?<1qA``45&kW_*3@j|lF|Skp_Wrj|p$u{t^rA^~r7+K|r%~4F(0d7p zA4No@rxm81j<&U^a6UAF@VtJ&a*dH`V<#&`UI(1FTJ`^$eHX&D&1xGVbRW-dgIVWZ zp%l4&FNJcZy2#Jk=C&WT=;>1Eap)?yZnN-D#wGW8XGhG~0^^!7Q}S$T{CD4!GZ8GT zrb-r(;=A8H9SUjm+;=3Y^wgzOYcA*<-q#MrZ0vDpnFPpcRTA8JGO6YC-0Nkt9Kww{sYt4fxIf8&R-n=rHuk_d5^=4V%19@Y@LeGuj$Qpri)INB>T;YQ z|8g106Sh|qCF39<;=Q^mL?@lXq*anQx1bNPx0k7uVAuYw70n}ulR#`sEM!t{*_W!y zhBoiOV7i-N$|3dpx_L5WslRZEHgV#c5C+VqgxQZnx!YDYqLp&#qb#@5vpRQQ4upoE z7`~`l1o$ce)PuzSg@As#`eAhkIOy z$<*}KL4ymLx)O}-&ngSwnc;(I_`p;mBk>kTPm{t_{je2XuSVKmP%{&ZR$(jIh^SB+ zbYf!?1F4A?s@<<4_HzymY6PuixXvbAgr&6b7$IQq<6vtpM!Qu3eZYAxg#OT$>)CYN z&nkRgWP(8pn;$QBBhr`2FfBe6(%fpqS|I02l}nirQKQN6A5Z4Ta8C~jN||nEt*WL7 z^=u6SoUAHA`xytNEH31Hk?*T_8xWUR`v|^aajbEQU4N)sYt9}eM|c_%2WWD2Tc3$1 zknt%w2+kfGqd5$@C58Sqc|5JZ0R=stdcGiIT8-Ec*@mwTLmA3Q*mQM!$o0x^ds6kc zw39gqv(^gb79Lmk*zS)1BAa-OgEvBV%mhsplSH%`eqtK2AFH&rFs-%bxfvl`>A;Y) zM=~j}F0?(+ur+2yU%S(-l^h8FW{r{V;bUvsDtZu(=k7#`MRovrf)h)A;AMIXL=tM> zolL>FhhbeUtunL-=hrrA;~SY5j4DuQ4raKXEE^L1JzJBhegegl0&gb}*Gi9yQ52>F z3C8-+mOY@H@p0e@q0OaqWECP<48BuoX}QL8%cKN-F=)le%MV^VFb|HzRNj#x58gQ0 zp@8z@hw~<)z4a6v5OY~LQa8jIrH(XVReEX4dE8jPpRQB<%%MZti@4a!)9$W8{(u5` z*4g2@aCtb*`F%d@x$ARtZ1AX2{$}*S^dB9kuP)OcC=4E)b2nDEqSGTR4=`%)7g7y^ za#pE&c2^Yqx_sxTF${$arQeG@rx6kdb|TVG7ta#L8q>cmJP_b?)v4A-2BSTu0`=c> zDAe&Qh<#UU?mxIR@7*={f`SFb9?|hll-tZ{tm+_wvIFi5m&&vewQb|gIg#;onnVnr zB1|-@4Gk2RObW!+@zQ%3hvK%O6uGRc^X*|;A?srV?DTp`wD|;ObS<59)}=rvgTzgy zy6M&`gR3Yy5XWf)?)SX^)F*r?uoen9zs0&Z3v=?8F_qy zf3BIC>ldw(9wG|97o;z;CiN5d%lgy(w0Cs!Mu*q==>3b6adGADN5TLIQ^AXxo`~A# z&K193e!rj1nA}_1FzBBIwtbXFSzxk?kueQh_gLO3B-6yE6xqb*>itm0$5)>iQ2t*p zGdet69bKe0sV9y%Z0vHF$l9%X7|jmz>R#1Oy*uk>KGdVr6mR4cBjFhZK;VRT$fafF zrM3y0fCiV%n3yWo^WKIxx(5TAsO2D2GKJl&b&HiXFYQnN2duM@+;EY%k$mMR#K*nlD(mme(3e`C!#Q!SsAa(a?%h`U1xa8YxO`YM6Z3l4*osq%< z7B-ze+5^i6S#67{T8#bY@PK!4Eyywr>(b7Cd+Zi63#tbP`9A9}>X(kusF2GM_8%Wb z_Kb^tb2>zD@;GLboW+ye!5%Jpbr$ALG@10pi5sIk_4Zepb1Ow?05>Iof9%_T=(`l} zAPaL=M3>brOhgvhdc6siHf5*&nU|BplA;L^YNAh|K(07%!LRSi$x8td{k5{6k4yToUryOhDF}_ z@XB{eVU48r-c(EA>6kgnvfdz4UiU&{2ODZ8D8Q1H^vf2DRJRb`k{eK{Sz+o65|j~9 z8>(J)vpx1`i+U_Q_2#6a^=q zAGcBtKLS1i;{;J;9gMrt_XC2roFDG<`YKnI z@kGojxSqGZ<6RUPJ$cQY_^NaMRTa&@|J8x~@?)2`Kosy^)ESkZ_%_{mrtGZ*6(xkKOl0e_q_44bXE+b{AjfHs~-2>2mymf>wTW6R57Su?~ItHug1e6 z99OR+BbJmGK1;WKX;m$7wN%RH#=RV9v_e=p@6STN72ob#wez_fmy0u^yI(hHcep!O z$Kr=;vYW)vs?thZ=GqxG6w7L=ETv6sSB`T3_)`?5YbIs8w^sVfXb&fH!g{$M#6DT4 zcMnVB{qXXaT3))Y;f%o)H*io1e*?NK!k7&4^jDzn&u8qh zu<#a2HCtVS3RQA+f#(WW?9$IL)5-+Ja!;L;m>au;#WtrdZytT!5YOi<#PxSlA{=oQ z@s?rMBUfA=32!03y&*9-i`AdVNVmkFzhTM zpR8m!;!p_Pox$b!C3?w_nX_JBYP)J*9~CO5@0a_5$}I{Gf4srEU#4#3U$}u6P9OIe z@oE&q`p*wm2buLVN?7JaZ^O7xUX>YdV364OrQ6UL8V#UrE|P#hnvGU0JNp;x%`4b- z>wU)Yh3^?v#M=*NhP87bksFo){@eMmaGQL(`cvQTlk@ci|6d&W&9caZ8`e-ncZyl) zQ`GZ*!qLxkbQ1k_1kD7NLn-5}nluSQpWw8Hbb4xVWyjh4J|riaklz06J^zf0E*dP@ za zd6wS3mI_UMgsO$Oao38}S^@?TdW7_SJqyPbk$0z|SR|6FnCaDKj1mDY?0=`e49+N3 z!j&?u$s>-AEiXx@7=K776yK}EVIJiH&B>;xx&6M)^YhE#mSUe2u< zxkOS%gs}+nlkPvRj}q19#F}yBw|IVg1{+Hg#W&R&3+HiN-QLMaapt|;qF?{^4Th(X z#nMV~tJ3PoL+R=jjEr^HYOX_Ldk@V~A|Go%`T^n3)q0pYR^O(`=rJ83rlf^tBsF;l zQ}cr9jWdY6hSYbsjKNl9Da^65H=-Lo=BO7+RPtL?dG87$jX=USQ{fjP1M3Fd8 zR`L0)f$)yrn;hGd393T)@e#gGI%x>yXkqQXBamFZ3tq#zP(5Bncbp1MdXhdh4o2h3 
zwjIv(!z{*w{^vmJLK%w0L>9~s-X>*8NP}YDGja;4=CPlr2XX+j5BuT1C&c@no%`~j z0uWuTWMXAyAAkE%2)cZ%lrdAn|M2&2iq2dn(SSV|>BmTj>%vr%o(mOPVh~}oO^{6M z6MMZ6o7sW(cx^4fpV;HXdZVz_Dm}y>IXPG#72Kht5SvH3qwCR?A~%lS=z5*`#Kf~# z!2ry?LSjZ@!7{^5`D-VC4M&hgN*^dL1M$QI`nH?xG}Hfq%-@m) zVHp^V;arA3sw~L(%Hxdv$GYFo6MG_ZyPNoW31`gvvvqYYD#U=Wz=xDM&gJwcw;IWe z2Uxt^Nf-c<*1)_ZItERNl)e(>Y@Ap8`^>1(N@!~FFDWF={iB*3JB{c-uGOZFRmL=J zo2NR~GcdsH<*WB2Kvv5JFpx*}w6bSX{|apSl`@b)rw;J&B3!^CNwF@o&k*YP$54|C zlV8eoI&{Z47Lt}3)ny52g5UWs|B(VpmSnncJ}W^J8B;GNB`{I#nF4=-3B6e(TLf`- z2<&17dS9(x2kBnZgXjB|NCE#c&_)&@?&Bz)m+k`2C+)G*J?UR#Nj43P{`X7db ztPNlr@~)I5!%t7w|2~4~9%_J!(jXhYZ^mdp^-AHiR>5M_R;tq^X87;f0A}d^^`Bgt zlhg=5QzYdl?plgzsel|>S+6c8W| z3r+G0DqPH*X@ejQKs*)oV(UMbtKZB3e2-uZo87CElEZTaVE&e_e*i!7NG4Zd`+vVM|7*qQ(k__D z*U}S-`6q|ZA*pjFBXJt!yk1jS3U#|^cooV04}S%^iCDlancbsEX}ll_`oGH8!N_O; zhJ*zNzY%cVC0mN1S5_n6E8df-`0#}w@YO~O{y#lCOH>~OSDvDH;Mc`YzW-2YfYv)B zWfgKX`Bz#ZDS+$7ih#!<0_6XgfXw$fe>*F$!^Lmds;NOOQ5$t>rAiv54 zsHIdgfOOOvB5N?eZtEkHMwt>G81DnRIjflsonQ?(DOe~aB0?PcJ}Rb=FPlH3qY=Re zhX85KAN*J_oEoGi_JRetHKfwZ){sP4Q-*F1_d3C}?@gP}9mj6i>r^ZH)RIn5@KiMb zA15AR_P#DTS~46P|Mp|Aq8cc?sN|31nC_z2N9j2?PkQIRgF*udn~@n20io;p>4Dwz z+yeh=&+stvD-q^Sqb*>`%^WJ$M1Bh9((SFK0RxAsu!0*3^00ifEzW%|E@2R#4{!D!(6@iPL2TJR>6QcZOl#19{t6i{tx)oTjW) z2{Hn7vhLTJg$nO$8_fF^QfK*3Bo5g@6!)ke3qIG`FdpPIi2|Yiu=hqLGj< zG}W+!&#rS4`CQlyweHSOGbPv6`sFwKcQyef?;aZ_7{l+Kqyd-QaYNp*I|Q;g(=p)7 zyb1z6ff+gxI3rH%bq$`@x?!o_TRtUs=%(kwAQX4(Qd2EOxKk9VSgp^SmQt9->r1_D3-Wubsy#Mr!n0y`-Tval!z z!gNMI`2buDtsu*#6amIj*r0trm|?cHH~aNfhZC$uR48EJNva~OodCf(0!)=?5cV&2qZ2k4)Qz_jC4KS0)QmS0m_=+cFbJEY_# z+^LnI-I74xyY4}JJL(Q3ljJp>t&toXiHI#WQ)-8GMr=z9oCbC!hO1>RpJG@@QUO;gOHGr$V1LBe2}` zcm7U)0i1ns2M8kS^bi9HbZKWn1AJg&j||6i6K^X>8~BVIf^PZpa3TuGj&Nc}`Q7tT zX&7p<`{M!4e>M>tWN&~RGbe3}6mGLQHC!p5R1S3$Bv#ClnMue4;rTv zxjd#~h#eL|jvwmy@Q+J1ak&Ta-r}E zz?tky1lV$emUj491crDj^u+#;SpIWxzOoI80&q z50@S&1|FCa#SdVzs-?P8l@XYF8~{oH`FFYc&32*1sbkfu2ojXjAE_NT=07@08kVC) z*u8>&({TMJ4?e0%GlDgtqSKu3KfIdqnr(Pmy(ji zg@6j*P%*sMw--_X(-W8Uw?#0`8wJd=-UOa~U>8(>$srSmQq0dI_$|@#1+4(_8K}6O zz+Bo^vVwge#sd)VOr*G(cCdCZyEimm-8OKUaX>7`*GFg?@msy5#^gY9bYO0zj-`dv z#<*fznQ+UvTlk^=-(?go7~c5G-;hVIqTlK_9%V41fGU(-Ie zA)Kj-Vn}6_*RlCZ+=PvyWVOYtfzG?WDRb(yW_OICGjhJabyQ(cF2Yl%YmuPcp>G0e zr{={$w(U+SUxUtEAmVT1va1sHI2D5l)NgaGRN5`4*U5iX$@HKVD6&ygE-1mjFGy(w z+6oR?e+F1RA)+VV_5U;snVEhfAp3F}+nIH%xg=?;}DGq91bmw$^ueCpX%C9=ouY4+e=< zCNBBm+%X?}!)i{MCT4Cm8?`zfwEQ@N>fAA8H&U7FrtWo_|NMX*=P)ZXE04$={LvJN zkqpYDL9hh3fV8s!EdmcpDMFT&Pe$8{jgx~i=2`$$6q!VIphQ5oXM?d5y=k5_Rgefv zDCQ8_ut(^TO3yjc_pS@D{0pXQ2(T-E&hzt z3)y4ukTL;nDx}V5pj+*1b35ob%m%YO;%qLefW#XxFCoF`YPLBFvtBd}=2){4UN!6m z3nlW&V<1KxyFWu%o60 zK}gmnx0dPPPg7aY*-Q}|g$2pvZS=?Vc1cH@P-Y}=HmsAk)Ce2F-mVUYf0NtF!$8i4 z=azx40F6uqs0-ziguU7m_pFu3j~2PnZ8-F+1^m2VG!SUZM@JLsOz&-oN8=aueuFt| zT>j+HSavW}!zFK|zaaaLm-#-hN&Yx6un>m@&VYh!?D0YsAPrdr`@>&&NQGm(>H;=ZxnP=cp?$fr8c2ocYsHmTr~M^mMj_bUJX%+8u|Pz2hWimt!*MTt@+f6037;KX~aqqT8AU)lSfK11c+cO}S9j%qjou4@>s}kJH5H|anbjMj%rs|V;Y9YN@QLCk5df~@95jhB6D#WO0&5=lSw8O|)*mRS$BQEIg z5MQ^4Wt`Dbd+?YI8?l5#!sY0{DP`o;LK20BNM;9NKyREby|2)%A`!YTMu=Aw zN5n&PQKr;yasxH6YbP*Z;{L5?*X|^wGH`{|KcvOrY$Lk`cPvj*>G)=Eo|uwWOUkKx zP^{L@IPB8}goF0A*aZ!8sT2cE|4s4LQiYnN>5C4C);{d)Blb=#HlfiZ{R#0F`a&7| zXLHik$Y1@FIo=U_pz$mCQ%|qn`bml0oPSHxkuB82LlY*3s@3NXfoQ=S&ecUZjR`*r zhc(Cx`G6e6g3{0)ZEOU?`>R0wes^ZDPF`pz0X+x3+Ij=j2!yh1*k4sk6|jR3Z2QN5 z%5xGec|ydRr%N!D_XxNNnY~cOfAkcVsFq7t^$u7u#Z?(MGiZ}C-?4F7~epBVl zwRV0q240)ar~=nJLO_2Xi?U07&?S}Js4z^BUgy|14jlusb1#0l`tmNEpz$k@1omg8~RpX~Z* zB^=OFYhYaUM5)3?d_%E?60-R$sLM#2t7pTaKRZw%$i-c{^RHbkV!j_e>5JLp94~!u zjYMioXKR8rq~5F8dC-9C=Ug4m=kDCj!y4wfazT>CYZJ#qykK&8+h9Bz`HQ_+X5;X9 
zbYfy4{TUZjdB3E9=&XxxeV zMAIjHKcU8+3r&|VMy)WvpLigQ@_j?1y55rd8Wl@lq~V?QvlW~D17p_m5^^c=VHiuBi_;&xaSn?$_;E5m-hGWvcW9XAY<#fZld#!2C z*_$hHY(4OJKL7@_k*6;tsQ|>T$xTCRVV6ayBgp);IOSiE&!FHy=CmG>5SrZ#a6}XF zXot8cXtJN3*`?;1AmaXnG*fHH+?OCLHA5VrkjhA))eJCM_~NkVNKNt3#IEb;EK0UH z)PwDlPSAHb-Oh;Pt|V7;q*Xsi*ls@Expa=sKl5#t4U0I z^R6oe55GA%mNRNzr{m}R#aM+rCCI}SpIOIKArc78H}qrwG(Ai4f0#PUpgf|bTjLV+ z#ogT<0t9z=cMI+o++70%cY*|WcXubaLvRTMmphzuzN-5xRitVL#`f;+wVp+-OP%aY z6Uvm`766lLwk?4~7-+(@0{SiBdOZDG1I(jDY~(NvPnB9xypp%k@qVe>^4LJNU8d!Z zy_HJb*@2q{fB<5Wf2iQnlD+L4_Te1>9UabcEN9-Bq05s>J)kPioG6AGd@~vEO%G$ z@Oeb*4-vK~-llVGiY0WOni>Kf6GZL4)r+Q48~o~i7a-YRUuNA)pkVp3`<5FC|M~iz zkbCVx3X}}p53O2bIVz$pQ?`7b>7g`PoA2kAZAy)gZPqjzDmeTS*@|#D=AufiAnu+? zP=g?jRA@y}7HkyvvS$j&gC77fY0W`y0 zhkT#A3#~dM(E1rcLf<2O*Vf#RH%>%_WuSn5HHrR${BnzT)F>+@73G<1YE2+5|_W zs`rwk*Y!hpWD}749u$x>G?s?S4V#2Zm$57^P=_wWGTk*& zw1jh2eD>?Po{sP75vGYaO(L$oM(@UtB9*F=`|z^`*ZvA2NrrgK*4lk*2**K5-F(IJ z*>U30-5oESsQ~95vnlX;XE#j->+>3QkbeSu7z(~|Pa3c^L0%K8^sP)k$g+1CN>mus zN`$knG`aDqz%|NRUY-<<1Z4BMTZe@J5DL;(eL5%UW#$KryE3Q!tNa9;=?DT@OhWn) zg7C(q5oT6}9G*ykm(V{t1*$zKdlM;K*x!7GZSgy%`@R-#r)gp>ccE}y`+7lJgf3@o zhQ9dC2bhYXQPzQBs_3BT7mKm8-V@Bp{^{ur4(h%9Nz1vW$kfJVyBQT(f4KKIJt?TQ zQ8?7Pai&|=8-r10*>>2j60)7lb^x#0Hhh@bfSkw8V7W65@wX8%LzW;cAh%^rYx-Z= zi`UI%vovAk9MhV|c*0$36b6lmdVjq~{qX!eIKdKCA8>KqiPY`J z01D7)p*p9-32i9omOcgk7TXXGlAbMLJe?DEDn!l;qfF%zVxr}>fxgc73;g7*wr}`w zXw0USMJUKk_MX>o**MLDIA;o?YM)mgd~wT*<(}u2Z?9#TQ+mSVi4&HF)Y~(91t^J$ z1rf-H-qAJV+~0Po9ZG0fUyo_hnDFMUN5OUp_gUD4)pP%Z^}0Qkw&UH&lT ztX|=}6}1@UEek;K9hDyZ@}f~7TRX6>d}`riW9s!=8A%72Gir~W^&wifIPQofedq5S zpS)yIrvU+|tf9saVfG`d2w(-B{W{B z+x7LHD^U**^$8sZ&u!b@&s)+9Ra;=NO%7~0Qv2^JNNq~chXG(nhUv`V@XrsUv3Hbe z*GdBx(hasNY(QUItkl|1ygw|=I#R7nrFUb4&=1^Ua6M?7_(3=#FFV$&j05D3!F5Lr z!r2cE=!rKmKcFpiJ=s&v3xM?rGC0O2pLrvUN@cK6GRQaiyv*Zg{zvR`FfKLN*4Zt$ixHtv}}vKQ?AzM;BnGQ%V9H~a;5jjPW` zUV5otep_+>LEne<-l@tUQ5KnPuD&aS zJNJ&Vp&h#a^eJM-4OBVNf(8uhQtxy?olAN^$~~W{GY!}M6q$dH5)O8fro?3=@ZYA_ zf7P!}$tnDx2XA^c9CxQgbTobn`tVgrt|{mXH+zT!t(E{%@@@UnpUA3Y-U@~|28}~` zrtw`ckjgQU;1o;RLjPa}r!eV!K*P<}i!#M5Une5ZR_bN>U}e)k8}7U67VQr#E-G2~ zsNcmAt|`~MYeTIFh(AU_*LrLbL52m~*v%KmvKYOpc8Hn6-!I&1)w{>eb_f>^zkoZm zhd&&>gZ09RC^>>{KE6hi45J*)ik8OSmI5z%MCuHDPVeuXgLkzKt;-(vE2hJ#%tB4{ zxtoD0^#<%x8_0pEz^h^W;nSm~`sgXQ%C*J;)$mMS^CwtEf~rFEQ!LU!Ikx-dX91s3 zI8Fwy2)_8z?6g#3kAUt|ClYP4pifMta2PT-4m8EiH2x&o@)qePs{~% zJWIK}z8*~~8itd@UjO}|T%*5PO4hg{v&Ye_&z*|JScic3R$bmoOFBi23Znh)EYv?x zVkqe=zqmy3gDLhHmk;r-yVS)b%7R;TA_m)3T@CWYPHr+XvO-1!Y%sMzxpU zXJ{_~p#{`|nNqI#F>Y}kTCDQr8*uX&>L&rY^uc{W0_f9FZ$~TTk|Wx92;4TCh}e*d zA<4LF-*B%Gr=d(|%RQZKTqf%t!RrqH2lmTR}_aFn<&u zHblai2(Rl|@;a}sd%30MgP`f1*Y5>*-5)V&*HyQ7hyTF5ifQx$4Kyc>-0{oZfems2&knrP^6IFSR9-ChCW z8o7xZQ^C~Jpfr-$Q42`)A|%dFO``~lkq#mObRIkzHLU0`$6U#l$Ft4!_W$ry!VhD6 zKHAk|FTkYc$F9XMhnBkWBCA#u^2ftr90UkO}5&#)|1ZK*(Tm>#Q7l6Ms7ei zWuL#e_gSXC%Ah^!U|hpw@}#ll{Vy(^dPUR^yP-T_A%rRT%EsP)cOf=u-RSf*B{2Z1 zmD^pT8hp-6eb~Mtke(YD-aPk49O5ya^Tgm+gB#903k?^A=Xu*gmSpep?rL@-z2ecR z(kI2OQk3gtim+ekmq6?<0wpP}lzq|@%fd9hUB0YsdE<|(<|o?4+9+3Kp2+?J`|cGz z(aB_}E<059J30H!6&*QJf40=g6t_QA?)FFJ@GOt8uzZT6FOjD|f)(4()U9hjNdMu7 zF3802M7e%9w*k6qf!V16jL^d<`;&|#CI=uBrQanN=R2`{{^^jy5!@Jxp~gGZLj1CF8S>4y0alKTg|=L%)`i$p92)qSGuO z9|f6U$ysS&vlDDT%kef03H&h0y03 zNJcWDcIv-4XpiwD_WI;;s@QkrIW|B$VxEeBOgN8#t;bE81UF>!uGxa=imzx(WIBGw z$a}n8Hl|H#VEfPdHReYXW|F&qjoIxCgMmjP6W`l0L`D z!F+a>u<5|0(yY*KXM(6#7ZNHw?rU3v*PN~p2_BNKwR+W2BDB0%s1tXI!AnJ;aZ`sH z0Bwrtn0dRB&QU~$2SJkq;wR30ck+myw4Gqb7Cm#%LyYe&VpiP-=Qp6*rGEnhwElUO zN@X@lfJtw*X7{y?jDq`I1~3#x0nJ#MRP`{NuTAqn6A7L#vYO1FKCJhoSYlMZ)9W81 
zSw|R`GMh%dctHoy%8*&?4|T!b&KV;F#&)SNeZ?jSHItc*3%~RcUSV#N5_SGPmv9*O&V^cTbYOmJGm|)8ZVhda0QN$ZF>3cd0?asB z)LQjU*=o5~tNHiihv&M=U9Q?4NLONm4=bW6URkf6DD+#|gMH@FKj}ixmFxiTDjvqv zWnglg{Rlk;b)vl5!Hnz~0R5^v2D+ItR1Q4>;70&_z{~^t`23ZD4F0)6kF0-&Tqt5Y z^x9LX>KEL@eT3t^ZCtpA{a6}F;-(V?msESPwklP@Yqj}yZV4B}mlMd`t)AmEy5)^8 zVQwOnHncGXFj!s-vm1gy?tpWB(X|UWzxRpQ%W5lX78~Y}E@+=+bDaLFy-9H(@PyU{ z?3kU3%Vdxs<<1CCZ}BT4c@HKx5*zN%@ANKJb)yN%jJg_apy{m7^S1f~xJA)q7Qga2 z2J{5=UM25Ql7GdPeowPJv^bmBd;Vz_u&Hlc*2v*vvfb*}`DAk1ROuL>iWq6PM>0Z% z!~D9n_c*Lcrj09l6`yQ}*PqV(gcX~yd(b&dbCLifI4Bcm7UyJsbNTYcY5er_)RW+l zWU3H@Zew5R{vQB_SE^pgOE;tt3L{+ym+CNI?5-);IbgE(wZ%^#Cp1hLF?l$gRsi(WU{mVH9Ei?d7^79KrdYl9g0f`08U+Q9Rxmov8FEtpi(@2COO{nLGkVxh^u5O!L@~>1 z&IQ%!9c31YW^~seO`}@pIe~PB@8w4?LNs&q(6duWlO|ZLrabTcePDsehZtyEC=$^R ziS=EXzM00rTNEwgG$dN(hbuhY9#4Kk9ZUl|982>##SVr4XGt+8m7w-{c0OvTKn_9Z zlU@{V#)ub2T^njW+=tKos+3is21w~^rh5>}{yfUS=w5U%Lcev**y2A=QE-w^O zJk%gW1rakW0|Iygy(_7RG>30py^NP0Ja5Oe>+?m(*rp*F zC}s%$2rG`QLr3pu6!gJWeL-`4@|S@%PTi@iAY5Y34yVJ))0rP$T6WOQWqtfxxzMdb z_>Kzam{Q(3cK{jB4DVtp=NFNA9W4JHg$#yj9*R18vs-*f%}PnA)Z6-peVpG)eJI~PKRU&C-28KtpVX^j z%|2e|J}7p8KtzJ2m?F%?w^)$`T?{jYEAdjK0v^$;k- zI0X=8LHh9_7GZGV;n8jBm%3Yn_y672{#(j|zvNmZ&#pE7C_aqeZy1 zxS1_h#pdtw9^ZaD)o7X^A57Ca^sP@*&gF3ushYzP6n`h>aoFT<4JyhAL$UYbaD2Ws*L^H1eu=Jg@R@$4r;O&~Wh&+2-Em$wWaw zwK>lci*>U&lr&rnS!4Apqo3Nzh^X-3l*@EuNIpVQeTunJxmrQKNFjTbAPnaaN5f$i zW}2)`BA*l+{%}9ZAupMS&9eCaW;e4w0P5cytp0y7)6P zt-eI-gj0^z?+#98t4!WC27SAjM5ikc6qW4b&Smkb{7iT+*x8q2E_?k9$RuFbn9_x& z_$A-m)GkAP;q|(g^?Vp#+rH9IDYJdGg4ePVK>e?TW#Q35;v-dh4krP{HY%%CW}aUx zeKA#d!+N2Vv@^uARH7Y{jY3cRFL<>SsgX3C65|-Qz>Ny)EJfsZw5l0eq=nL)E;j{^ z5VHh+Bucq7rC+ri!}-3>w#j4(R>mT-t9Fak%m%AE16vyxHi`jvI<;C2g(Xn|HppZp z$*hHDl@=yGsjb>!aRiWCT_R5z3@#&{5YC$6lVvHEEBOfa`T}3?=n_=KKSkxmJa6az zlp7%saTnkZ-x~LxOHwdR#dMy6aPNCUKBl8)G!n6n0AL4h^FB3Ju;zt{hJd5YY{YwV zECD7=Z!xjdqZ^pKt^X1HWgmsj90w@#$Hg4f)|3Ply}@Oak0^_PKhO#QpS%J3j(%AR zA`12?Evj3wg0{cjw1!Jk~wrnwwNMGfWv<#Nt34Kb?AsiuM3`xVU7 z*HA{1hEzD~{g12(hStu{JyK_hbUsFdIt=i~5ZGKS)dE}>$9xIT9bB+qX_MJccPGdt zNzs8{!+L@K!eyvJ-}oF_`uD5tDhjF{Vd+J9;Q)FO0Z%}if)Z6G&B1RLG7mVAgCyJl z?M;#(4j$Lse7Km0?0Gm1@oLO{&!R36?nNou2TNW5qk#g_~-)rWplM5X2u#Ws*7S>xZrO@X{&)u_sK$zTNr>iv!x$?%}Vk{vab0qb6pD=(D{(_aTTQY*2ey zNCLwpNSKxTay^=0kUjro#60iz%=;{V!uNzOF0yqi-;#hBJ%09Q4KR5%zLOy&)wH^` z&Vvd)!N&WOc(gPUV(ywtkqAD!6gw`0{yvgClcJVc%C#Gx@P`8f2Pb07?^)hLYAIQK zm>Q|sM8xOUQ*2y2Jw(XKhM`}0ddaX_0H+MXH>5AYBXGc?fv87&T;)T~f5Eq|hxU|-OE9Id&^cgNNwr$wSI}aC7 z=APP?UHy*w%3B=Q&o+j@tlD=6Bty&Rng!sz$ctha-aL5giF4YC?&o^8Qa9UI2x3(mwH=ONVL+xa4S7#?_35LG;x7CGvI)weYS9zkz# zLe`U!o6CJzy&mA{WNLaFv7@}`R@5j9`8-9qYOnx_7fSRqeX#XDAkRw5lPlK23PqKmv% zu3T7gWo~>~!mSUhW%Nito`+es=BotO_x$FYFFZH*oIsnMR=8ndUG>$E81emqY)Z0z zMW`*vu*b^>8r=Uq3dkYP*DwFU#%g>-GxV}jTPQ)|{Ui74&+X@yMsKpS5MQ}k5h9$X zJ>qmL-b2Y!iT?wd(DM#!ku^XY^F!kK9^(TM<=2&Bg_=VWMiwezSS9;|{rxYM_uXz? 
zC%mBntWc$!`woFQ7HTe7v?^lZkv|dm_ioeJA7Np@^_$^l4^j!JoGWO>Yg+7eQCV1` z8*Fg7?Xx~B3dh;i!x8;1tBAIN#;@1vtK(W&sU&8zteipJ{)=sf1)J4RU*icE zjPXS2$_VR$D0WuX=rz=T!FlBcUCI02UgmiAc4ctlzk!^|gw4-#uS6PP*_X$<`sJuB zl2AHLwzOxgFLL%kSO?p%@o5~b0u3ltTrO$Z$>em2yPtfB^?F)EIh=MDk-bKZdfo3z zOeUVw{AB*+1LX3!kwk+F8XN}{ph>jL>9Z;MF@~L9n02#hx89M`3u+KOm*UF$4x?}h zkAEZzwT6)LSs6q->nG6G`Rp9dIN`e|59~^r`S&NXrwqDaE;-2l#IvGtnMfIG2_VE~ zYnTIr)pLBSODTt@89@A;pKmJuXDE)4e!~v<;4+wkSjOo3yKS0)eYftc?V=hW-vZn@ z%|L=8X=kG)*!aE^zr_m?4C+L#V7GBy#}U3kh?h+23?@%U&sD}vX2SA&qcDzle-x|X zZVm;=c1w`;Gip1E@*hs~x_ROkDAfeSw>?D)*(4COjJiDq&;Smd zhL*%D7jZ(dRFdn&!Z-D*4>}83k0-BO-0(6PARA5oH31+_$G|M*CWT-^qk{lJ;J7Ev z1d{nwgUgK<27?M>*ViAt^zjLC5hK-1*h{?^qm*a-HN*-E3WxkYpPM9*PK@1CRNY5i|O!OQYY`VBeZfq+I8CbY;w-*9dBFdJlHR^%tpsa#QNIXQYrV#x5L` z`_p3-Kr|?`qr7elXWcGc#D}^9L~RUa5$@HH`IG8(Q(Qz6(E_7JRGTjpd--W7Hl6L( z2CSiAE>$ci*MLS@_%N4V)WgH$BhazfV!K5mo`_L&l)da{D?61Otke5NH;>~s*pl4_ zJTa)Y^{r-#%`T)2N0c?Rv&^p*tlY2!MnkR&u3M)t!1f?e$bb7mg~tv*aD|;p(%O+% zbuS|3uX}cz(Lu4@72`UDWeb=?2*uIv*#QpEm4{xp4+sz@M#b0{G{n~KQSYh;oxxdm zzSsy}tANE8_tCqHxE?QF+C0ABe>jUqH|N@=L}lhs9YbpTmig zgS_kQW)3@J(go*9Z6g2Z^BrUhd9N^-xDCdV_OrFzb+gk8o`QpP1!RB3<=!R-KdUxW zt$dSRm!?6lR!R~qyjVe77fJWw4#0rW2m@0!nK7N2JDR`+v4=6AU%dMm%kY4I-Ko+) z_sEAy4+G<)PEVU%|C2&%!?j%{1!onLvk_1mvkaiP5G#mFV>Um3=yCvEUmO^c5vK}T z8!Y?Y=*%ehTlTTKay>Hl7g$b0L_qj?z|u*)P{m`aj6}cf5&l$8~($8 z2*^HH1lwQGHp_SCYsTBBaTsuC)Bt0viW(3n|!ijN1 zm`R{f$xa9G#U1D5_+h(aEF_G5J&d%J^i;C!3 zJS6=P&<}lh7L4rRSfF@2ozbWjY?W0DQ}-P@+f2`nUaltfVFO!3gZplD9nt9$9=fVo zY$mm%GZe*O$~PrI!s9`wY}m+)oIplZA21xR@p*J+sS*i>K~k#J;f!V}s55$4oB^O) zftF$cqZrr>kNCJQ-$)#4*`$(a>1EWaLVaP8CH82#y32bZVYm15eQ{Jszc}Zn2f#2j z*VwK*wD=tqm^F2E!`|2mvr-*?GbHm>10-juxSX1mI?(n-Wa3dqKrsuLt|^Vi#- zV5Jqh9BWeCE-`*|4zUa{SJhgd^Cnl%!;Px^6wm*=$oi*R{3D}MSr7PbU|^ln^E3Qq zWYK`9q`+&In5qM$UTox7g70vK-#+x60U|wpAfuOfH{GV*>S3nsl6T2(KgaE653i0} zh_bzJVGNU_3Shwxljzj*KV{tG1^G^eZgW16DU>`y>km&##&r+@WpHq>(8otA(hZ9R ziI1G*L{&*iDUG)DU$rve+W9z#9!RUx{fkdj!xLJJoPUDV-%$$Z)vEO;^#!MWG8T_b zpLL9m@b_&5p_M5i0%Jf94-fG~)YtC%bJl-GDHs`;+fJtevs-nO8?^JKkyTl*WTk?Dy2 zgBpI+PuU+huH_xh|IteSvi*RC0Y1)fIToN_;@sT?7cgHC>L{)Hp~aJ^4-a}n8xJ~j7LND+gFP$d2WwqB3#plgPU92CogUEyNDO2zA_bqQXm zJg4pnY#WyAzoHz84DTxfJ_8XzZ(QNAYi;r0UsniFEwK1Z(U9Hf{8^H^;9$I@^Nv#>kV2;H1?Hj9kTAI!HJ38*yj z7LR5_R##7H5)ZdLBhl>}o8>IMIFjB9xi79@dZn~cL{M!Ww!vNV5`FCTEMiOJ&wCwq zX~|2~dQDniS;Az9v*%BGzJrd|nwf#w0NWSnz#3KSqt9c^Xrjf#=QQFb+hoJPiS(KRn4e0&q8g@Y7u$FdPl%xlsOBi+*G!S_aPzRI<3;`v3%LxyCq{BWR=6G<+iJ zX@3+w`>b>+I3OcZq^AuvP0+tfZE#L2yW3n<48JZ4ll~;j!V;`Fx8>VSu|jJjVOE{oM=KtXq9L%N?KHlS};A}HL_4e*6+L> z`iM$eqrp58my}1J6EQ zatm?O(evMc**t;4m!|Z|OF6E-FtVWFVC6Oy&ItkB2u*SYa?b(Wkl6M}^+0WLPa&ZI zVVfc&;7#>~d3bz;-x1v!#fz^17#Co`7VFuz#WqE(M*-D{uLfw>6#^Q2An%+Gn21>C zaVleoKhVz+2b5m&@aZ&oSg%yT>%}ud6!alx%9M+mT;`ycgOJG0N7jV4pNXl-Xe2T7 zkXBMXyEdg})4RBlBuF|mh;PpXNAh%@{bi6cvUnj|M4$KK1ub9wSdA4`fB|Cul$qu~ zR#w&{pv`jvCDEiqW<(BmwIC1Ri=dl3eiaD?LWsc8>b2AqLlX#iT z#xgxJ$gm8euzdF%fD!`sF&k<7MqT)QA3Iqij19__%f4}wg zgn3MI5d#?T<1PMX-syyBYJ%*!W2oy;j1L>h7TBh|Cy-(g5f?WE zSVcmDInR+^&zcYF%+bKtBtM3%CeLtnTTF^c=(#EYSIRBPQg@C zH;I>mVOY2CiOI@1za_g7;wMbTsug;Mm6VPH2p}CHURrQcWH}-!!bbiM_be%ts}}*c zhIkr*TWGrLdfvq$J=P3c(0!TPaf=Nvf@6JlWv@CAM_{>zP=!UB>#Rw+=U_ZlxlDzU z^?pJ!r~D8!2hh+MG!CGk6Wip!iwTh#HNpWJJlQkUDODceZO3e)0J5SyUmjQi3oT%n zb=54uxv?KJ9_p)Xnw2(+(G29UTg1xO*C1~>c=SXyxD^bbHs{TA8=jJ;`M2>k zT|`7$ta9_Z{rW_mSlORr>9r9Hh}Q68UGf;s8Cv76M47G~+-|8OcCFwXzA_g*qf@T4BBs^mj*Kb=@iwUjof~$85fZax1tV%|R)H$$9QrQi5=b<#ngp!OI zs>b5!{)KX2@nxMSn#sWNnwdhw{90&!)#`2n_vlHT+LnSZ{oMOcq0D2QC7VxRjguLy z?Gptwy06&T?GsS3Slaf=mfh>CL~NGa)J5RdZ-qR7o} 
zwwxYP#?M;i$T>?*?!6ScKPFnPUWk->q*N_=nV(jCW4}6((^k+$4e*TO*FbWnK@Nvg%;LL9CL^2Xes8-zbcuehQ z9Knf7SgFfSY_ONAm;W%% zN(pVSorM^5dFU~I8t#ehzg`bbWZWLAb<|mbmBpTDWQ$XcBmA6_&!mY(MFqd?+fkx_xLSdJ91kT=DEIY{X=dr{iHH9{BYz9O295pHGKy!W_$QJ7scm{@ghsb%CwE9B-qcUKU*T~- z!vP8k>X)>d9W@|5Zp);y&hkGO?8+Ww1ZyKWnI12lWKF)b2>a>&QL;8q3?r`>7^`+Uj;SBD6W21^VXBzUpxt89@KX&N>B?4}M?&e5FZw z<8c#*-iOTL1tG3@|Ru|Of4m*t?^44BwBcV}UU z>S9psZB~wA5zXUGtZ@4#q)cWow@s)w8+x=*cxEpL2iT8ID zwoCviSsN(=3$EQ>TrP<&CQ^OAm|^*&CTb=erslE26mFaXpm;$4JzYAA@{2AcBg^h%BD-}tBZ0v)!&(|)8$y*% zB9UBl;-{oO-c$rUjy)%hRKZX8y>&8+sAG1cX|nB@j%z#+h7Ar5=n@?^+_oZS?FYOs z<=9T@d^_9R{Z<~va|M=(7;+6gHGk#-eNa$mHw+XM3CejLt@M9hlVx%U?u&KF1_$vZ z`RH*KI3TBu8`psmWDCn<{&J7QQ_64R{6joB8f9)ky#MBrbN|Yz_f{IVB6Jbz5zbe9 zGwcf+p5opaGOUhqk>cEgIx&c^)57BhNUQH7RRok+0|xjVTVYRYjW(&-O?q6iz)7X# z;V|gaP^M6(GHjjk{+fPgHWWy@UcykW&{8gPQ3!WU6qZVAB*_{n<)(U|9aABm^3Gm? zNTE^3Yv~x~DF>(CP8$79qVV6V*8|isgEW9nGJ9z#d?wDn#S|(M7NClCjue2Gu$~7f7nY+0xPrX6}jT)SIt2!aYhGe8p+hB%GJ4ZD_h_ z6gh94Ihu2=qHcbXi<@ZdnE7m@>{moWHmz{0GS=C^em`c z%yYOj;A!|u{iHS$L15EX@DN#s^SceE%AhL(h-+PjlJh6#2^uM&%IbYQ3@wZ&W-{!u z{bvV{f-<}ss|!jhAxjp>46s581WAI_TbPguKf~NP6)38F*jBO)06Q8kU^E!;*z%>m z8#k`@?$|4o;WxB8AOE9faG?C3=Zqo*?B33N{0C+n#mX070L?ql$oS961Qi$Eye|Dy zYspth=OqR761jfU>Y#|zj$Nek4tb&HR=BkXMlTeCzf>>((ihTjgiLb5>n$eHnkWGs zi*UY3Xvp0FpzuA3551Hb>S;G8(Sg zfg$4GVQaLN@vU6Hn-%+LsV1mi(in7_(2N^lEnTWV$5(A&XL47}>y=Y4G254AjhWV= zZtI5v&Sv|%7@+-^5t9V^?};=}jd}}>Z!{kM{KPgHG7v3|{>p!<2sz{LgAXO(9c3Jh zl9{ntcxBYHyfY(tN)n z47poS?fmKc>!ts8F2#Qr%4s+tkP&^Dc|&Z1yxNGXAquFOLJ#tv#NhH~N7*b$`uDDd6-K|yUoy314d7NMHF@x?kHbTuDPtzulB zO}K33_bX+*fGwQC=pf@wzo*N$bk?9R5C0GV4oVFA#`wR3azzV8A5QLhRM<%uI&Oj~ z{`JCv1p{M~`(OcN`n{t8x$Xh$^7{D6&EA_0ZhSQAybz6Try2uX)QnUGzRjC6&9t-K zOOrI!|GVD68Y2p4yWRqt`exZCWJt5a)sP)G16p>ug6oUI)duggbGj31BjfytVa2yix2vldNI>w?WZs+L zYfVV3%f~UnG2w4VO!?2%`MBGQh_bypR$rs6mv<*9i2+9gQ|%` zM0xx5zyCA>w7$4>|91f*dZ^uO`uA?f9tds!J^DYw&HhF%^m6Nry}q3vJ;e|;>TB)~ z;JihklC)VB8xNm6)j`Y07$5OYPVknmMt09+&lao^)QStOPh(@^#=EUdj^PAQa zHGtZ;D}uhZwcqFfy`OEwHc_(oz3=mL{EhB;O7Y)aJwV{i9Q_sheLPg1V*kINKP^@W zV4pt>2v@GhnY*`v-%#YL#qAVp1^Fb7S=&>)!;@7ZSD?~r@`K$TL!f_6l><~Tma_%W z3RyhH=$SoMc!?27z;t8#==dh0d;^6<^Y+u@R0L`B5d!vvWYW{Wt5ND)ih_pwRM+hU z)LQQKovCz@IB}nr~m-_QgSc;GSF@Nq+OR#EEKH)ka1B29>4&~3T8f# zbAxHW5{*GykTNEq0OXy9taMLFt}i15q+%gsu8}mkigLUc5JC`d2G`fut8+hm1jUMs zI{q~{O{C5F&3^z=ZY1C3iV;bzem3bpIndY$D`KODPAVd!zWL8yD9|GUOmPFip=vCo zg>isEf5Jc&iD)wF+W}-tm^*9D^EnjNkv9MYZz|z?|v2+@CD<3&1CG2gaG)RVt_)9BL>L90_ErDXPky8 z@G#rRsqJT2!A_KZ!PxIfHWU}Jt}-?KSZqJR<4 z9vHW@(}4t7az=KHOX3ug{afe9F)6-NgffEB5eTq$1!niaa&okF+Seh_HGg&LuOK%`aZtE{OvK*fR%#IU#cz4CI| zE(_tWq}0ofbXjho@d*3L5$SWY)Q1JexXzm=r6Jh8l zEHMBEb6Z$(mjSJRQN6{Spw1b!o{h+!3o}|r>Blsc5 zMdZoTT}n#qPDKkne7zqYVcjZ#W(;32{AnbA&N3)V(YOc5YXMge#MHYt9<$|VquRNj zN*!YGfZb-o8u4EX`)B6|mbh0JRO&dQUff;oFLr|l#cEb+6$Y4?zcj>w;9+@A)sP#N zVIU}SZ#})9$NR*vsL)%d&PO|$Mq8y}F)M?+x9^;%4v^P8T=k<^p|`~psD9N&GcX1o zQo@C5L&LN8g4cW#Z}b$PUmN<%)xmjxB(SuUrgpsrD0{3M^a;K1jsl&~OST8&)dN3O zg0XptkFal#pl!dnFBH4i`^Gh@7D~h_nZJ>pWvda+y1_!4XMjy#g7KA*Zj54TIo~EU zatrrw3Bn>5xV0lkIA2SM#r@q^v>+@XHW+5E_m?>R5r(`pmvCEY={$}e8GuRF1LKK; zOwd;f5t=xdey71AG7OPxdQtf7IQ1QOQnzvFy!^M*$v&-KyD!2wmwhpyZ>C&k;pU_A zuA2SLi(H%%2XH%bYxviTzt#rdf|qQyMq_X1PNE#ptBVcdsSm=E`99qi5D|$_6fR<# zO2me>i|Xt<-H8!Na(foJi3sh}ug4(`SEf$ST+PFzahT#cA5N6@6PNER&ectuhR1FH z4UMMJDq^8iFAu8YzyqMs?Fu^Q2%KB9kl{`lA&`P9`1MB0T_(`R6zN=cFKOa-%*|Fn zN+=GiCtMw0k*M&hLRU>V_op`RJ#{&bs!*66Ybn$z$<83dP5!jUQ@y}ezid;97z#l| z*+f2Hn0+@#aX)Scu_W{gd{8wiDt=UWe(%v@Ik(rp^CXh@iZAT|W$Nwq-Y|>b2eE>Y z#;}!aESa9Z0gj1jC>Pz%Z^7m6(dq6?JFEE5!BT7{jl75oEn1aKg3`lwFJ$efFikHl 
zNGFd;JQRO_|EmRAlVc#u0{vq7A7PAqHZLYHW*$hUZ6Lq2Q7Lh*@ckDxR(+R<^v!1R z4bIr=3kVAn7PiFIG14zI=<{oh|1^{5?Dh~A4G#~Fsef8TDE-pzU_YOu+@GLN>c5P4 zOMAKYI)5~-Q_`ymk7&{dyihRwy4{Caw^-``k>Z~R#0B>y{9aqr9gyi2gO(<$!~(~X zqcdm^UG375eUEV*N4xv$-OT!D329*>fjDLUkA!>bttSRB0Mhpq8np=Ih`QJQ|=uWc&R_5v@q1d zkur*M~-5x=w;pQF8MGg1P^;S%@iM6GKUa?`S-*qj3Z|aUW845k3%bnuiY2qF%N&+%*2?6 zZ$}Q#Vuur%%i>7Ttflru?qJ#fOkojgHLe3r0b`{t@S>%yz>cZ(1inh{>r`-IR%5sb z&$xEaP_3_$a?jsjGQMviV99yzZ`4VvuJd_=`_^HKkfKS+DHCAWdOM>a5O4Ls%^Cf} zXAz><5aa9eTg)f7{$?S5Isv_%&Xqk&mLm~rdYx3>B1D|XiNEv#3FAF}=>M?hm>l&8 zJLaJvH}h%o5*NwtMXW6xw#(G?cmeW^qqT0spbV;|)!XwGS-8Y+U)cJI4MvXkUZMC9}3Lqsu87yZS_03#iqt$>J?C^ZJEy z>bjHuVp|zcwvm`u{k`q!2%hCY+$#8}hC0Kb~J1`FKC`oN(DkAe}DlsaBJfo~q)+Ss}^~ z?jAvQQ6JH9b#;VXCsF0;#$POV+Fh(Wbd|o(KJyRvXb?&TUv{3c0|00_BZq&E|`(?bk&-H(d z_GgSs6>;9C*LcFJO~`6)SKy({73iVSO@-E^aBSv*9xW`8C4#@hlOx~MZ@GL(AA(FR zYJjbe6u%cYT8ifb_v~R``^5G5^ryG}V&E$l`TW;~c;36au#)yn47rLog;n@q{dfs%ddP0tB{G= z`Xf|1uLf*Gl#6&PbgIH;2lXUk*Ymo96s-v_XSiDJ^_?evIKh2dBv?j*#`z3he+=9( zBF(r2WN4h+60*PZ4T>g5o-^bV$Q@xWsm#4V+)}JSS^P}B#|*0~bYWkb--<2-^LE9W zEPN_+T_#g{tVsZ?5k3{2#7Y7GP|B- z6Xn=f@diciD2=m}BPPO8z=OMgn{;{o2t2yN?dc1b0i3SbKduRehT?1=fbN(K!DnYN zo`~I>eI@#LJxBML{;rp^2EyPR7&x439tbWku;p z20Y}`?O;W&|4^kWHsi;SUl9irI4KyP?l1aNgO&wMjkew>@)lX>lS{wt&vJo`$Zgz< zmTJA3Z}B7IFiyWyhiggBQ5%BWQV0h^qZRiHH(xW*K_Ibd#GtU;Yy}oe@tTzf2QxG8 zvaS%O6;Auw?23$#>wWSyV*{uMOx#~O19DnrGvrK}3M63s%}5q+6H~ztzejlQ&yV^gjX%>#2wF=h%MXM?kZ84SrAG4G>lQ|zaPKzTZPCX=spANd zQuS2>1--#VrDihL&~RjY`*xWe^;hr(6^R@i5&knnMq~*GtDf0D#$0kP9T33unqm85 z5cB}>$0Ch#ij{roucXA+Y$$WfojTNh+@C2)vinpjT$dhDA83uOg>w*o!tD}3UwQZP zAwpu;Amf7{C-!F6Ayu6R?6iJPLktME_1CPv{J#aikp~=qs$Vumh%t724Xi0x^Djlf*v>U51hZ1DZ zPt&+zOyZ?NAN9DhSSWV$9+El*d3Q~Zd`<499h{HIl5ZM}0nXQSB3_JZD$|FX39<)_V_@ zY?NKJ3(}Mx+0WFGoNp%V_7Q!^jkRbd!z+e@sPk<*#vE4J5gs;!u{H?XQfo)|_(hQ`7s| z=@=(9rGD@BgREDdlgXMb9zEF+{B^}@GDNfy?|2&c7$09zDSDnx`yprC)MpNu@ZH`V zi$`lH>xY%2>!X$y?^GMMqhODD9yHP7k!FaZhtHYlOJ_!i-k}PYfzYRGxrA#ULYJOp z?7GZw;^PmuIP1KOEly7;F^bqpfuqfX&Fl_Z{%$=XF^epvg(Ep|VK^AVTMVL_X=Qvw z?>)%g0hEMT?lGzP1j9|38c5@$k#>G~HD&u=1$dv3PtzWxh|r}rnNtM!fk* zIVSP!VVNR~MMZV}5zXBx#XwX@HpLn`^Znff^1Ky2-mbSo47PW)tt4AkI+DvT-oA9- zU3X=@MQLYFp&A&*W(#&zLj^-ZEKR-1en_imhj}`EA{#AQc)WiLMi!C1%e>bp0c^>S zJ|9TeFNDDvG;LIV(g^R|^o0<5oYDDV+0a$uip7m{o+PZZv6fPz zWnA2(swyiqDo#zEYgJO7A>7I2MeN{O)D<^2Xa*ImDY6v5*(@(kiqQo+B(B=C&`*jUDddmZDX{U`Y$`BPcl$;p;2}0!9vv=q1)YUTc?PH@n;zOU0-R*d?K#^cym>2)Vv=jL${$T zG^u(!gf#eUb>w9NAwgW@w4Hiscil^V`(dmgj(^&(bkPQQ9%I6XQ+*Oj70d7sUF9L^ zs_i{rHn}T!x&9`jz0snX_NNS@w_gdA541PYT5J6%DpNQ)BuJMd$?%qa@|;J`u#{oy z>Kx&NGfzm@HJW^LW^eO;pT{bHqMD5(@u>D9CGf^GeKiRxEDKBSBgZyK!RMsoi&xk2MAh+=gC z$#(Din7?f_HIDRQX?FFd7!AtdB5N``^GPo0P3*Jd0I8mGJNRr9FOFm_QQmu>MZpT@ z5O-S?_QfgvfVD%wro~=F*TI6=*V_z_W5{LO^imw+qJ52t;s-W^eSO|Q%Xt2If%>x( z?x>JWpd}@=QbnCVX-4gGxB~w_S`%Yl?lp(plZ|FCuEiJ<@Yb=THnQYJ`5~P=%mOqh z-j}+vV0$p!kw5#Z+5J4kG0qA#CUVGYC1{Coxt)M0fx1c6n+u|A2|r95Xn2i`_Zy5G zYeVLzs$u$)2<2(ky{@)X{0dRc>A1r)=Y!!WL9d@&c8;6ut@3`vefEkac#nG=t{Q6y zfAu92obehkmb}+XGhuTW8c33Pwe?+ZN(RHITogF;n=@|GEd=YOq^OIgv}BVXlu57X zE#$&$@$mhg%}92PdQ~OZ+a|1WBO(wkmHiv(czZHS=(67R=5kl$_x=lE={ z?vlG?>Ez-OmXU?$a&yqpfbGWP8#kF8IqSnTzvRzl`nV=Y{fJ0tni#k*;6rmK5}(Zd zW09X_+#&l5)Ycchmh<|x*SYt`MOEKP6Qn8P^G$z@*6%lvon*~u8{?Gn_48%bG9oaU z{?=8u;v@J(8~rfl)}#A7u19SaDPtPpUxOUvtPpi?l`EeOEv=}Q|6ReK~ZG@BbmV#el{Bg}@E(>DS8J$%7$~1t}MhTgY|plaI^&#Ex_u(>}HW(o-Rh zRDu@BsfBs^neHi9XcJV3;ZLJvFXaJYxkSa~rS08q7k5)?BF{=Ayy-E=Ix9l3XAQ(FGUGfw#s_F|~x5yWt#-~zMS_&KSv-5wHj zbTF6;*FPL5y>L&RaXZ5iGyJ%d#MU zMt^d8+LSX|l4T(L%91x97S?hm%M5t)l-9T7Ne!j-%p3o|+a%wDg>8FHAK&^mKr#A14W4$OYmTfC21ez;h~9Yb$yfX2 
zkyl|vmDzK^=J*r9wy(iJtAP|$Qc_Zft|@%$W%EVIn}hYR0TNGFd7l)7PhD>Mi`3?m zkV710j^v`mi21tLMK!e7#sR=}HP+DF+s*Z0_tCgq$SuevwC>fPK!C;NA{&i+rpnFB z0SzCwlDs@%p(qhH1yTTRkTKeSDOtqN1JY(TApQ;DYd)tSAbYVjX(m^)w1oiP%$(B# zz!iAWawcR-s?AIOAg%u4DJS>Q@P zygWbA%6oJNv%L2!n*AUIg~|j@$qR$mCK5?-TMCfF5TEv<-1p*5B!d_Q@L8pPlsSbG zFWsKxn5#|3X!-%UdK+NttKwj%v$1csXKUGZtRp?Ue*>}JHBAxrSydv4{owL^Z)mE< zneEN~ZDLd6IW?=V7{c#;0tiVxn||;b zXU#lV6b3_){`73YR{II-T<9jSHeRaDdw8=ek`9-m(4l~c-o$e~Rmj=<**8zhgut*H z!k%UhguvWTd($~{x6{j{jQNxLW_he$5V(y9G(>g|KvV3$w`A@8%W|2vL$1vVZ;%o5 z-}|xf5AA(pLbvyWOh10bvLEbTDmA#i0!axS1nGHEfH%!3E(B+h=j36ME*&db)!Fv3 zzTT^1y$be)y}?lXo!Z_d`OfYx1=BvTvIsv4&ci=RFIQM16Pqb>W;zC+(hp-wMDwr`n6eVgtj(f-TOyN*FoVUhH_-KR1Qino5RfqvX8oC| zez^?jCjesCQ#)JJ-J88Cm+6Eam*0Ux)5oYj4}UVy4}6`y6u_nqF#}-!!ciiD*tz50 z$HCjmtw4|{`0XxvzGfzl+uvDkMzc#%2VfEXB}$<)b9f;dmLRe~myqq7h=%3a$;pqj)ldTG^UGZv z?8_K*7vvj$FEUwi*>Uv!u?p@}S0Eqvgve%Y^-~+&FI>C}NG0L{z3`3Frc;$n z2ujxI77AfRL?8Kr7`f1)+qf^VO2S8tsBpgP!$X~7q=~dF9u8GwIq<{DNr^JUHCacZ zOp`ztd-rU+IYMpvzy%9xA6pXoRyE8oVZ8swS0>-%OdGpgwPY%aOR8yO0mF&Z!o=-+ z^UpXcNHlE}P`j-JOkCLz9dHuy#o|>D#^6b^DV*xjZjHGfW3Wf~oM3XIKKh}bsxl_L zV?^9KdapxEp@`5B%^C>=Wn@df2}dm4*L{uE?-P z)00R4Z-4H&8Jmbo1^0i66N-Rwe{iUF`_lx6_iYj?-vDJp zEdAdK`)hXdKO?cn!v?it4?5Of<-gpCJhKouG;Z7}(GvRaNPvO?tVI+Z<1KXlEkg>h z!J*k9^*l3N|1%P7=tCJ|4q=xpSn9v$k0b$y!h~ime<=Ptk|{!P{^c=H_88Tb)sm(V_|t4GoGah`DAGK~!!|S8l)X6K)(WEv>hak(Jti zDY_GKE>!L9c|emrXn1(IL+`nD0ibiNMRbKZZ3(Y?^nf83^*(kQB}Qgm=Nh$jGx?>- z<{0X$feoq{F;^?V$oE&=3R9z|pa=wNpCwRfu&x0ct!NH)dLDx^<{0+gUj;+~06xkB z^%0HG39zABN29JYIcCowRTY!_W zPgmS8QfM)PvnzyCU6&{GbdNyXOB*O$b{0dK%yt%9U*^m~MI1i@1Vt3+FuA0k*1h6- z@|6dGi2M5Maj0Ab-uTgDDI+LO!cHen2hdCKn{^P57wX+pNGWq#Y;#8pD-(5%<;t^w za{+9!CHe8i(Y#JB^G#KvTeswmjTwxp9nr+pcMX#6dG=OV^ag^LE=MVO@)OqQ)`Vr> zL;s|{YznBZu1>GI?>rDgAJ?(St}`Qs1Kz6gAR=LLaWMe&Ud%5}4^n9M2+qMpltrW3_dq|@(!f>g&%`T(G0kT z9B+OZDg#lzkfAYdxln*XF?1Jh0`+kg-~e_G4yY%ubpe?3A!E;!V8ut*K{%lD3QGb( z7fax(c7uL_!WpR1!oe$JA&3k?jwz(FeM2>a!pCW6ouQ*dXo+5*&}?BzVd+41uLQDfru_9`fP z$tfw>JeGai88UL`{JtL1A-qL_i0nqwy!SP(T5M}1J7q(`z8Wl!uM8-eD6Ag|WfMim z69B%T6LZOy)gf6*5vL3wBVq{;3JQ{CvPa>Ou0-%Pkz@8D)EHn#{1`~}s3~mX1fUnJykB3IcVJRdVO3ulk_pmGQ#;{ z=4EqOLv|g_Bu%+nWe-{Cq4^DQS)@Z-<4}1lR)l3g@Jcw zM$<7@CyG%PT zcM>DE`aVKCW)2LP{74m z0lHDE6J6(VKOoH0u_4xPNCgX%$4GwNPrDVSaFpO4pn+Sw^Dp8|~| zukI;bL0bul=}h?gY(aX_`F`5rC`O{PnXhH=Q`P%5*&X$Y~Xj>Y^d_f%Dy`O z6j$f|M(Kw%u<^nyp;`hYIN$fQ=y|_2s1fx!twf%IvN6oYp*47Kmmpl)aa!JWQw12g zDMX)Y37~@95T2KZ5Y6pwz!1aVV;Yyvkbs4cR9s#9zyw-=T0;}ok(6aJ{X@>cTIl`S zP)0<YsKb+D*aiFhMxJiWirSkb^)Vff-SVQ>5v` zps>m;6>veR-+j^LSI+m-I>TXf;!n2wQhOBJ&&(To``eCKoEQS#cqN(iJgxRhCv2PP zCf_VmO1t7fTvAr0080nEGM+BpZ`|S!tiTUQ_O<85>1&809|tIegF&TCTy>EA!1`6f zgNnj(8&EK?q;S_QuWxROlDr;& zM%mM({Y)frr?(l``1A_nd%CZXQ0@=iKqqP2hFs`zGs&*6Y)^Gp4tt_gXu)oxySw-WlDcqP7OZ zhpMx1JObqp(Q^x@b$OW?lVHd~lpy*R-fhXK!LO9(pv{BO<_voF=zR^Po4Y$$FKBtK z5Dm>&kl+{82-$jko@OXWm&#kV{CbzYN>a#ARw<=xM1JpFuwZp0N0ru;sZFuM9`_>! 
zbXd@~S`Yoe9fl4=F0HNQ#l>$}eA24on5803!Q1_n&MIR>k$yVm)^QsnwYp|uRD1vDJ}io2t1ZQGM5{8 z;Dw^j-7%46+jmmFlGE${B{=p;lJfdG%vu)3_`0msCq$(Sj>F-{$5Oq{kX zSWDO~$X&~N<5$ycK6}!ga!=T04I3q9U)f)=Z%oXQz{)$=t^>-$;5hDUz!9<1uuCMji^+0qJ(Az5%v%Aw|c7Q*y|t=AnEVYg#5bh z&6~ETbC!mb?Y5$PbbZ8}_B>e5Mke^MULbI_#rFMc*zU14Erc;H>q1xgojYRs_)J#X zx7$Ib)=9-)`fJo=v29x1x4Wmt$cE>n(F~2xVyGUi!J#6m=$E5eB(e{LM7$X+q~(M*}QOrclWrz5hE$3#^biiib|p6lsak z;qa-QyA&o`?amE20pg^n1iT2luey1pGR0iXugeM9&>?TF*$^h_E%L&5RX#|1%1f3C zGA}Bnja48ZLtVb>g&?J7rB0Vk_mRv8UWMDhwJ`af225pPervL;v`~u~`o|@`@|6}R zEiV~Wmm@>B_XDPey8)%iw8Czdb0;Z&4KI`&C}}&2ghbOwP`&Lrq8UJ~p1%z2CB*L~ ziQZW*{OnYQM{vh#rSpkG%hUz)qOKOYqt?ZRhT|{h^V51fJ~%$BzH|N%L23SoFT#VG z7;0REfveCK=^~$xRH70nBiyys+qWTNTa=j7p{-ydN_j*3v;m7q>kD^fnA!ppbi2a2 z-bfniH%j3d6GS3on_SBAZqpoI)IVkLJ@!&zB?gb5gqEl(nKIdnPg#yFvM1TvHRS%~(0|!bYQVCrFfM_b*a54k#PCa-tr%b4asl!8Z#!=z?z}4(8SjX$ z-JM$VzfAk;b^-@XV(ZLAy_`_YHZlYLkFEqyg@$vNI^lczH@ zQ1xThtM`0|igIQrf4hL)yYQe$$>Am-h$Bnii1nA)hCmJy(k&UJM&`;ev|XpjNDXO8SdS`=M-|QeajV9 zYX=AYR&4rP1soS1KokNNe`WqJ9W`6r^a`_D#{GNG9J9a@=;%)L!3+rwkIgvilbds8 zs(SLVOqAsT>2+=X45dUb%k6LVbQn>a1b*u`kCWwoYNG8C?fH)?LPmvk$8SFd68`5@ S<_;G4QdW2*|5^5V;Qs@K7~53< literal 0 HcmV?d00001 diff --git a/v0.20.3/img/wrapped_ridge.png b/v0.20.3/img/wrapped_ridge.png new file mode 100755 index 0000000000000000000000000000000000000000..56e53a93056eb9ba0d9210f38bd5f3b437acd69a GIT binary patch literal 148789 zcmY(p1yEc;w=IlAf+jcw26qN`4*`O^Gq}6!-~p0AfCP67790k5m%-hA@Cokr^4)v? z`@eUpPF43htJmtiySl3D^!}!%B8UB!>@5NU0=E3;PZ|gaNT>)1hz=NN{}8F~4=w*X zjCRt}Y7WwJ(k_lJ?wYP<7FJT0F6LGmav!;bgai=~n0{KAnreJzV;VAhZ)!R;#>)KG z-ACi=*BA}c;L)y8s{Sr$?gUk4s*%wq3EpNe0tzP?A-Hv*3nQQ7IBc{haS*dfLnAsN z^a?;Vt^#CYF*7wCMW{`JD!!FGp`$#-7T$!H9pDy z&+vaeF&bM>Pgh|M4j&&Mpbrnw#odPE-+BpgaB_2SbF=?ruzUD9dz$&OJA2Unx5)pe z^U2D?!rji*)6T`2>OZ<><}O~IVl*`WG5X)IeYw1SpNd#_|F#(E+8kz z|7!mmD*7K(Sk>Lm>Ywv}^u@VE|BLzmVE^MI%JCob|7$Y;9qE6e|3VdiE6VY|X%l~| z)WfKcfFOw=|4B;A7x4syo@*pCC+^1~MlJSR3WE*>LmuM`@Jnp&RXzp{%h#^B&+30L zFuebKN1B$BJk<7QeDj8$I$pv5^!l3n^?v+hDJ!ep2^t_D@8{yPeCScM+U#-K!1nUmc)xaobY_)+YVvUo3yHoAe>-jgq1%Y${ zNh{hpyZe>Z3|cv}0mN4S0u}KK2Lv&kKvje2x8Zxyh~b|C_1WDlqZQ%dL{LRP>;6($ zHWXSR*BrEZk?UEy>v2d`@l=C1$$R(c>MZ`b0}SoLLw@`Yap{LyFMojwZWwt=&rRirAcM;hISclv zHiP74$r)DPTr>yeYm8NV#MBPoEbu;tGzq_f<}qKGzjq25(6!Q_1}nj zD&Z(_n2@=hxn13Fsa62>r4)lW_(jV1U)jOZ1x_~&G~(H zfAIUp_4*BqA}JJ=Rgj`cPOq8!dzo%iZc}{CGK?`gOs&`VLts;G5bid+hOQK_1v<;l zA=AQax6cZ2++7}4MxW9szVZ6niuY+baacT#1#}FSzrd=>%X9rM z_o7MMocin;aLoL}(TO%&Fm%JDf=a#a2gmi^(bhredqX;(3KKn71mlB4Ob}Ggk=#DM z?R6wXqBJnVa7jtsb~u^0Bl7~Q=ftrx*&kXE7HFZdX?^5+>r+(q@w5HRZ`W#h4ZYBC z(q`!gr|;4wC0)B%g0rlkvv|x(Et^}nLh)lLjwHK*>GbDyXZ2F$W*oT zSn+g^ScWci6^8SMIVAjOU~)wbSCi_=Ya{61PvRIeKfi?OviR1QPWFBxmJ<9H3l#Ip z7i`Ow$2IH`JgmCvK{G9^XbbCCt)QfVXo`2~8Yqw#r?0syOFPR4coM3zTs~Z1&kM}e-@5WrH16Ya=MhU ze(JvWiWq5}B0}vzQuiax?dMdVF(8Q8E4w78DWzq}9vEYE<5sDpWGNWP-7MN+vYx5M z$pKkcon7RSPVlr^FiXHfr4*s7yax=VliPRbbC?n~VVe43roAIZX}zXlBbZx+vgokF zhMyzFT)v-|D>34CB-jq_N|R<$P*OEeeB6;#B@zvEb2=_i0BpI(zEYFFh*4mOjaGrH zMI)_nyuRKf{p0WvHDaAg-wcBf%p9%gI82Y?B;V>vnGc2&I^@ymwcg<}@J9cgp46N0 zP19V<>ypvx+EiY97kGZrh#L886|OLp^~pP8YB9yeVmB`iO&2#w-J+|>uK!ImNUuC( zZvH5|L~c0(vmmKWv;fmJk>4D)KRxZ=SUzjFm6bmEc1{MDflA3@$o`m z_;(d%eAkd#OD@7|~VdHy=10xcNcsckt9O(Yjmm~{p>=|mLadgopu0S(4dI!zFS2la#Z$hqFiRt1Rbc|eOGyI zUrLxm?F4J~QXe4(a<>_+I=(AI`&Q413>9CCc-GsVJciKju`vF*O;Pih_vSckgizMc zp3+S%%`it*_^Xea{`>TZZTdVA8`d%p6Uw@HNN8`8RPg{$FYFzsRCxz*WtI)0c=DS> z%NS3V%JWAN>bQju7l2eklj?RU36*4xC3P!|?P7l-D0G^Ledf1OYr*71F=EyyJc?pr zb&YHGBa$Bql8HpK~Lhrm+vLeL;4QkxQH-0(+< z4?stKq99vbU$XpVokP}g@>VbB<%oOXET_=1HI>RI=avu|L@W!D{NI90VW*`8+*}B+ 
zWq3y64-cQ_?yJ@BpANn>%Kdu7^cm-DAtZEg@3czw- zwumA%VD3#2Y{VRC@^BNXNEd`RF35Z<{dF(jW!jF+e!&4^#q>@uXj(QEQB!IwUU}Q_EdUjh#1NO% zbFh~?rl$_iB$(I&5yJ7Zbn-szpr{K{?@`FLjlCM~8<7)LufwBqe` z%_n4fz^j0~QYR=%YiKSM-+=I=?CcuL5X!MS#K`2#go9Sl_E9znfE}H}1OngD`3>cq zelr~k#dUoiM>Ht$K;FFCNfro5UoluVw8j{Hr$_ijaqD>Rf#}N3X|tC_yNlZLi037e z8*J3_eSc74kRxt<9}X7U<(F;kKmXFcir{-K-x&NAL`no_YiM95=F6IIkPOXGL8kj` zOHMtYN8|t7vOhtizP|pjgCP(I3qabxdvoXDYDdE^4o$Lxk$lweC7|5=iH0j5;H8lo zHU$}or?J0I*dBY2X195+cUj?k%eq*~dI3{P^yEc%uQWybjQ zS!a>JiKr}^k!(|%7?sk|wjq0?Y%P_$36n1Fy1KvmSMrEAIRd@ z*GthnaZcOiE;)T)aZD49aY%}4holHRmAW``y7#|C@rW0+ukw4`Q)Ou}0C4X&8^$0K z52be)iXq>~Di>P3jy(acjRyr_d14W9EdDz;3_R%wv5>5q45d$th>G{EtifuAXi12J z?8Gmln&GM|5!$JHG1PG!nwsZ6UP&*RwRI!8*F!6fC9;b7z9H^zfq{J)Q=6zLIXZ}R zC^E3tSihvW7EOflgjM#kz)&+|aUW)>*r>W{H|ECyLXtH3LxnJ3bJF%qIss8Y-oA{f{gOX8fBOs<- zq-Hxr<(^t+3BtxM{il7+zl*9XuExRMB{+FFiP^I|LWLT!3F9X`l0kvAKSe!Wpoe%j zlFgr7Pe$!NUiUFiY36N|VEJmq`3x|YavH0S!t&~vb3SiEzzbs=#H2bI_I&1c&5@YB zon)yVZ$#Bv2-1n9Kx9R7IdjuA?WGBr={fgF;&aP1%I>49Ly!Vc59QS%2<<>MN;DC0 zyM3&gM|TuqQ+n#pq9fh-upso5ENZOj<*y1~EV?Yz)y9!qH~Kkk%?UIq5TbKcoF?`C z++CsSabP@-S?XM4v#?Fh&%9@I!VDQofdjqZz>E6&3t6^M!UaUxV@H`Mfz56J{{${E ze;2~fF=WUj6a9M(_?I>Gjg1hxr_|2kY&nh*zH1bSJ-@0MWDGPp~KAqfszMLN7aRt!AlZgfgc}OT&d-w2Bc4D*hLI zbeEJ}aqFLc7}0S!FJ4|=G``T}r#~v*rUGBY%n4`j*U;STn(Sb3R^!jeVMJl7(23Tk zHP2YKGblnoxIOIdK@?8Nd;q)snm#Rcaxv#V6n9Y_iyXa)pBw4v3=tAu9f2v@9$q9} zSg^s^4b@LO0_~uG*CeyIHp#ss3^hSA+H>`@c@}c^{V$=LM{!b0&!1VcICO&q;31#i zYkpJgoo>XpRraEvo8>#h9q`=jPIBrY8QCZ<@Vln}F_&B-_c*Urqr!Jxt{DYU`JO?C z7Mz4wv%)Xl(GcOKJ*H1VAP~e2E&8K%)Vxt?*qOcD;Gha$sYjji>}};{&T(Yf5~|O@ zrEc6^MBP1Cym5d8TFIbx$~I&+Ean$b%v9vtY2BG=Yoq4qbI;~U%Tbz?|Bat;B9;j; zDOk-SAk;C{lQ{^Fdx$YSJ^CKHQlYt%%f{5yBz4Uc4nJ=<>qUD3T4XI-h%5*v&strT zl~okbLzC4v!~+*6Y6s^CY*l^>!?Xk;^rX2TY3|!^-0m$`M&?nbuAon%oZ%xz#?EBb z7$oRtfJQ33#dvLxnZCODV z33Z2Z#W@PC@CNd%vnkW5aXkO@k+cIygt zeB_foxvl|w6kGfozxMa;TafjnrH8OQ9}~ZCI70VI>5Y!{xgn|pCLoqH2oBm;MBHo| zOmJ&nq;cTZ8{wynjXY)(Al`4Z?5}DZp=+{%=uLl82IAGx*?(7zZgoW7NFqG*;u_Lz z*pagqyuTN^dWcFuA#gAI3#?4y(loz@9we96lv0raWB9Z&4Y+Ms zK!Jz%CyEkKlI=;zS&v#PnjAkn^Vk=|R*;%;smivQwKo4t~E#!s*Yr zs2{swp%9U3w98$b@7=Vt>8jkkxDR*LOh3D`Jj1QJ7Ev71wsA;9Dw0%* z#nPy=V_%kE%k9Kkrb#-d00Da!i@e&4I zuVmQ0OlyO+ovlo2JSo?L7vPX=+&r4D^1F8=*tyt2O@MMls|#L5dzw#_c`>+PDHESC z$gMvdjvP?b-*a^P$13`;K$o)kk3nDN86mdfciMV(xjs%mdfNgYk2;ii z4f+CX`fF_fv2R}_eN9sXuq|&s?U&#tFdc@e=s$=FystjWED)sN=usP{Ujd8hm|FvbK z67<7*)Ht5G?&EJbE@`fup?G}Oc{@Qo@jZCWezy7AR&`AoG1G8=8&;GTSQOWCJvk$h z3cvK)+L`XsUO^_h64;;oaKUINm(;uBp|K-ekZR$yvc!WrEu^Vv(=HPd&Zl5R)!Z#P zM?5mY8k_8ZMdoZ^yLTVa7-n^?hrUq^-=~<=dY^@Hd*<%YqmWB}U|$?1{Yd@Q<*($a z#Zud}oJv*5WqSqQ$Fyj8ZXO)DiA=m7G^m>8xheGqG4TCOD!18(=cfgMnYqr^A3)(( zwxbrAfYQN*H-nUl0UI}RExXI)2b=1TQ>BB!2`PM$v7+Z}=>dzkB>@xJNXN?q>()#K z{No8s*rSaxxSS6wh1C@lA9%tu>oJK`m4xeqYWK+mX)D*msv1r-fp#6`SPbeEwrk#q zV&&*HGkH?yMvo9@i{RFE@k+E6Q<_>qpqR-0(lEl)s_W`8Td^&%6w0W=4zVea^h+h3E%UjB&p)Cmt$!>C;t9xVOB)0pYeO zF`%aIeo7h+WZ%(HOmw`bi+PCoU*fC#)rRs$3Y_q(_s&YXVEHp1lS*te&H6H2%uXcL zdR+62hUF&kiqBuUisegoazaW^A^4?+K|C6G4J_N!qcOKSQRa8P@zVv~zZODLbU=OiXFVW*zpR#;FT8w0|{aNqWS6sAFPRtTQctIQ)XIyOQjn20!s zsuf%5;4rHvdX{qu21J&)t~HX5H7W_C(0;G9jV9Bf-nd&is6kf?C?tBfR(<6Hm9 z5yFe0uaM9)f+30-T=rCf46*buMDd1|raspdt86}T3O^jsT1u&bKg z1h@D4Xen#UcVF_sV5Gq`(|}KM-+jH`w#0%Gnld$#1a1w_Nj|K%;y7Ba$9xnFB28rc zWP?QHxeo=7a-*)!30bTFS8yGL7>53QzlMPC-x{=gvpY?dVqPCiB?voBNm+veGi#_?*@1nU{^VlIA57Qt@uS7TccOa57n}Wh%0s)$V2$b=D_CYPR0O zVcr4NX{wpr%XScdds+r;;*YBOwZaa%-d-Be2rLa)Y`R_kK7kL};h#0j|G8pK$ag_g zEZ116RTD(fIEExHF*Ve1sL&p8YrbL2tUETTPb3DW?~JhmwOeD?+im>xzdlli8zfEr z(Jm`>dSt&V4+AAy-R8vP>RLT*nKWO<1WGf&Kuoun8Xf`Phn`M$RUkr+wp(gx4kt 
z*Tbc}L`mmo`7|1x*Yp6bA%o`cgR}?vW&Fx!Rfw1Uum5Okxs3FAH_^xjR2sIXCs%J& z>NjRz|CMaN?+C7D$KkI7nb@mnp4ze$HHikegY(GaiiWEiDztb*`Z{ZA`7(V5o6DklC%qAeslb5) zEEoSVfyOL+#`1%BuPt;BH4!x_z*I0%A%1&1IS(i z<8mRaRfbh}W^NW1?WHayWjJ*c@;i<1&k{Ane>>}>8F%jJx_ZL zB4gxLqAm@6Oz96s?(m$ktF*e&9o)4fp!C{A)kAn%oon-^Zl$sPxENf}*cGo3g<-oO=W3?3tk%46aMWQHKb_NBstk{ZaQMGvh#&Gdax3m!Xn#$i%(M@ z#ONACZqVwX9a!jh7At}IHwxbxgY8NVAqE_u{QmJ8miTRNBzj_a+h-m%C2a~240Of5 zPPVy@agwbc;ebi6Bu~QcKTv@ z?Nh(7Y6rl44gdsq#{}N;@uVJRg2xN3od_>YGv?0^KWdsdIk!A$M3S3Ya~S;G1JwuB z`yEXMj7mlcT$7po4L%{Q17G`xW^lh`L7twM4J7WN@Z0pq*Pw#LU}3Myt}Tb9o~6BV z3m<{mR|K9z=>6fv^@+>ITEb}m!kXi}mfjj;Z16;8z=NCJvTeZ{j>PcOEu1Ton4GP} z>DkStla12_%sC+HeY!yxcysNO`jD>RG1nZGt{4C6?uXbh1?GP6syj91M$ZB# zr_~QVlr6KctFh#D6v0Av`hQvXoU zAmN^^B!<^V-3%GYNB@+-A})h4V5TnmiPR1s48Gf=7R2i#6K)!p=`x*LEJ+CQGHqN} zene{!^$n>kQ=U=t1?WJ-&C8H8X^hvYp3I>}hTTNMdQA;;IHG?m_N`1G_s)yc<#K%I z!CY49`Yqsy(*8|2Dm+?M)Hfq9>xR=M@G3%*Emf@lY4>r1dkyU`6@=1no0R8oFV~*x zg6Sq>-F~G=D#fkHwCF`n@z^34Ak{D^_($S~|7oW(dq#;njK*W+9lAW}=BfXIg!Q&f zPTGR8e=IS1fc!o5G}ydwEA;eR06gFHVzp4C!E8*!1Af1O-<09pIT-VLfPdND>XKd- z6jYSB_PAJAlznfBMz8{`PKdewn{S#AB3KNHCru2^~Ju`AsAW;@Ax_^DK5U}4|74>9VT?mqGUZs&Y&E8vwH zh7Jf$PV05WDA@4tzA|rm%n4S!AfK<~3~t&3&jfA8B0a8-HSnbAlIe(y&sM1e5Y;CkwV9bv?#H*ikDp;E*NZpKFiKp+dJ7#=~RJd z{LuF6?qWag3ZCG5IP6#F?Kj=ul-~8xVbCV9;e2pX;=-I(YLxS9R@cjek3bskV4&r| zkLRw{tCh0C6Wm4#+dsbU#_gV=_op5csizcvA*9W>OqaKTQ@;kpzLK#ioug@@mO}(vCvv5S$+s;NZ_M(SPzP=i{?)jkp zT@o0;_|PHSdHyRg(4s2Tx|24RFSz{~=W;$(Ratos&7)r^Dg(3Xsa?OmL19w@ywXD% zbmV7!aT1|XfD`o5nM zGqIqiT+7s@-IOKy-uS1SAezgZb%kpJf0{^I|5j6MoIFQebQg%K-p~}i!gbKouIY&_ zsA*?^fy-!Qc;+t(4qFf={W0O{VL^#G-t;gVCeSMR54OXYSmRPRNw`gCmTbDm>nYM@ zl*GDaw(pVtv(jS*jPZ0O45bQWl@GlkZo11F@Q&XpG)Gkkbah-saniA!~vVDhqFZe(b8G!GPb^_hO*UR=Lj^sp18V+g*g zOw73|h#osq!1{WY)Sr3+AjmDy{_D4OH1luGo!QyvDA+tzf z8}n(DWQ7!F=sxP3Cj?FK%!WWvQuE;mfc&VoSP5+e^BkhOK`ay$G`b!+!M6r`#z{6mDZZ{=Te$?dunXr<4ane!7Po z3(w+dC(L5OBor0FN?cr_9ZT%w35{vtz9V9B+}(dGpmHSbi5>oG0-VTH+WuMNiTI3; zcsuE#zZpa*P=LDSA{HGFbbbGqd?<4}lTJiNC2gIyrp^5rgyM}PkfE@H5Z#V$d%6;_ zQLkYv{!U2qon`o!+ZwQc#4gO4ybQfnar3eCk4x&R6!^+BScM`Z;*o7bWGzhP^)Q0h z>4MJx;oIQBzXM1SSJj$)ii!7|EYF^rtF_ZCg@xz8vgz%vLK{^LL7{Ro_v%Q^xK4)+ zn*lB42VJMHzi!q7nJ~w0HJkG__y5io-y#Ta4-QTPMXbI1?xQM^HkaUqtirEA58};3d62EY`4jCE&{LUlZkb^XcHsZKfMwEps^?W-a9gf_6&pJ ze5C*K;Mc~QVhYGx7~Xh`!Mua0wcSm7Q{*rq^38fITM?>To|jA^JXI*LE(6_@P!Jxi5$n{N?Xy59M`;P#ts*JS?$@OA+=%!DIHUOdS5*^Vkz;ok@LAa~ZDgOd>l_0NVFC59L|Mtj=tR0li8?!`Sv8*yPg1WtNBcf`ZZ$-N!e>od0F zPyN%PKfJWTt_`tXyFN5p+U^wU6YGI?XX61=>^^Ou99WO;-gT2boWX4liFHAWHra0U zpvz4tB-9BlnDq83|$lxtvOE}%YSpaxh~_vhc?f@ zx>q|+zTO)>9Y!E2{btoLUBsG;?J`fU)Z0B^pX`ol&fUX9Xh~&w=+{)VH z?#fjx6#igJ9j+A9fO~hKU(M2rv^AOLP06lEX2{sz!$j8G7>8Mb>`oAa2}rjl?L)%~ zX~ZTXhamTie?GRm&;efrovBFMP2xx&tjGaWB|7gk3>rMS0>#hq{m-(4Cw~Y$eK$Cx z?qLlYP-XOVWONFD`NRKlbqM)!GpnH}nVnN@iB<_~NHvEv=$kS+r2`k$-bi*B-JXQ+r;+&G2RMc;3lsAmI65k=%*rG)i#nPZj{lw2@%Fg()h>i&^q z@_X##q>0N5e*M0*wR_}`{t#SXwpbGBs8yhjt0S`Kein&P=E6rDWU0Mb1WZKakAISr zIY2D+JA-Y?oz33#$DowItkuU39O6<5Ey2mAbSHZRqF&uV# z<@ylMLvDj6*M>=TlTb6$1nNQTVwFSWuPk2)|I83&-Mi+2Mw6gOV`qq>A1FaX3Av!u zyKTi4-r1?jRwpR>eK~GnPW2`a6zm=1#y&p{!~C+PV|e}s14nR)!>Q{~T=ei})t)u2 zM1B88dpA6+=j3(F;_x9lNK)6ti~5JKYp}56#@b{zDQX<6J9+K;rS&MyauMrV;Gjtf zCcqNXX{iT9w@hI-VA|DX(;tO_Q-3AB2bjKJ9$fZ5k)`$7L7j1W-Sq*K=QV)H4jO$r zBvsq}QLpWq(}TQvPi^o2bZRmddCpbD%~ZK`MEN^UrFW!u{IYJ7hyV)u5I4}TgxYN* zG@R|D+Pa9VeXY*Z0GvajdyNC{cCpYQ0HXD;BA!Rm3&yVj59AfJzDM06ms`X9H>G%8 zR71Py8wt@ko%JYmQz^?jwzcd(7hbZyN+QJH(Hxs(`%|YWiXezWhLXmVTQRD`4U2a& zF~fO1f3+Yf(Kl@~iO&(N#kUw86fuW_?@DlqdkgQDsKZ)S2*fcmm(T1#C`SET_DU2} zPDaO9-jlA-K>rdCfMM^aZ8WiF(z$A2TSn~maSv)?pwXF}WV6ZsHe-Z`TmE#!0fh{% 
z7fa{#p0w3f|8A*hs#mcTawBZ!{;3JTG2P;IN}WRyg;n5k(u#ZC{PcP-q5BdQbeFZ` zr(T^z0WR17LVsX(u|M{)5?@v$mLn*VHn6`eLML{VDl*xn#97)&cso%DH@mGG&*gLT zCq+LLt{p%r!_RTLV+n8Q!yo%UdX35UyNSYR_9rBK8n+ebQ**i|iqDVBgNzXZjr2JJ z`)0%9W#d&|@8z{Sa`l`Rx)_(|xz}egwq@(vdjnc6HipK4ti11h$EBeg zz*6xMbbP zggb>V655IB;mPV3If?t4A{@*T=Qa22HnLqJ1O`ql1Q;xm=z{395+tl<6NFkr&S{)- z8T*m^?>pMt4>AkSFFp{f{Tufr#&BVtuB4}yYr+c!>&+rvm}x9A*6?NwSs9i$UVM*s zLIVY6Pc-GDQoXSg!0+mt=b>NmWhL6{D2K$~sCNKJ+5Pw0+M>`FFSwGXIE8fZ9%}b3 zM^k!b^W6us#m>01aL0dDCg_^7;naL*(!NjRw5hyE;e2`u8V*`~PA$wx@0#P?2>G#l zP~D%?r<(%lHYkFqn{jv0Iv*c>`B?vBr=H6Gzia7hgKD?P$8PH-Mt49gMaf2 zRD?2c)-9MMf|@mK&zvmI>%1xSPG6)CzB^PF!ym5W`B6;#1k;H#9Of$u_@A7Sf!iS( zci4?T+pO^8w$EjI8<6c5|3={fG=8^qupf{QW&`1qNj`BncYohqZQZHb1}-QFZw}$x zKEfLaU)b-t7R8aj60sZ7ZeuDJp>{Iu)a6FsCG}xsiRf)4fS3q;(FQAZ1L4BL2Yory z_xo9LV#{+=#efd!1<(^dS^cT8=b3Tu9T8H{1QO9iOd%B^F{Bf%*DQau(rqy3B>l{Y zB+fJVjoesiC^lv{l)4XfvHoWe)6ES5_Q%E1k+*hbRD{y9F;tV&NqJ_!-8AT|ntaez z*ln6LnBI~V=4zIsZf@S)b=RXF_3UD^-X7lH7fpiDM6tX})qi^|7+*yLK72Y=O;^J2 z+;sA$bgOZS3%IT@Y=os?*Ke1dYV%qg?SBgI=(q9~8=pH_~nYY2V&4|gTCnQxqU7- zlk4>GT}cm*L_CdQ&u7Gx$nM9RP*=T(c9NmE*V zM#l(`ljv?`^_8^Ff{}V^N}qN-MTL3ipR%QGZc4wU8Ib1jL5ANfR-TkpY@0fW$+^$0*ukRRVh(Nc^5i^MgCxSZkY8|225W!K4m#~KC3KS zrGV^?#l81SbSKs!&wu46dsW5c%J{Y1M9k$9T|C_$s|-6*2V%%odGGYdx>^Y^4q4|C zsXZM-I<>Vc^8$S+dQOqI1c(}rWr?4j%)6Vf0k_lJ+Jv=KA+K9Xh(tN^;ST{Cci=6_ zPOSt%--3xx&{4GHc3@w3cQ$72HH!)Fn5pn9YoXb5E7S8_TwCEd#hv zhm&b4FIzIa4p)kvGfu$;0viCu==-4Cqq(wsRBou9FhKli0oiA!zHwDf_xXzCXrVRA zwVOTi1td}p;#PthHf)8?nvk5qzb3;=ypBT5(bCb^<{vI-*))9Ra>tS(-qU(5JC&_= z1FI3>`0_|1_EhQHr%Ry!rDC?8s(70JnV9J!3O@fwT;yL0O zxH;N0%Y3YdW5hW{_(5kp9q&IV8Q%5=x+}fJPKG)CD*!usN;tQ$TfK9;S`Rp#dOYpq z$eAjTn;~~;>&Bm_Ihrj1fPyyIug*XMQ<|z0%8JQV+bec+PtzHGo~NMT!i^1Y4(v5Q zj4hSmve_@M1II2?nW)jF>5ct}w`YMqQ}?>7R59lov@am4pJkbhzi~<&d zR#yA#Nvwt`^4xbnara*B_VWpnPjWqT7w{r>BQg-F3*I(DDIcfbJh~|F=bV%K-^}^g z9oE7irY}bRo3WkenB~szK4qc&eADoo-ODOsV@pS6(7F8(-R3on%Vy?dhvt-nVMp)8 z4$cj$_fnw5HY3}CKyX{uK%nq969G`a2+=Wyjlj!kS3wD=%BiFiLXGTJGDto22b=d2 zN(!+a6L!{DqGI0dT~AD3eKI+g6_H zhny1sbuxks6t37Fk1awRF2~_$<|5MD7y+n-{n5MJ979liuu|BC`bjXh{AY1 z*8TR=c!ICOJ(uQ!@1oNh7dvb*1mm7AaW!rqn&gMlQ9X*%Y2|+1&!Y;Q)g9(Kz>J%H zL+_`yR?_k|?hEz*UGEB`-SM#|r}5lS*S(f@Zl+9Yk#snjs;s&A)a^^jrJV-v`{o5M z!BR3@VwaVtJDU#y)*w`gA_hzH(ca)r{Sn47V=VW!a1*C>@D>+yZ^5q=c$}`mQzFGIr89VDO*I;W&_OpnqiT@WtNij?aT;q? zThE8!zMx;7*OgIYk+aZWXWZE=6k7JkiL9M#Br+EoyiN~)?4yxizUK8!-d5ha? zy4k;;K&tIPKha-z$2mEUwzsG0GORj^X>F2UH8E|J-{~1&onyvGPNbINOYjy`c!VSZ zf-jh@4U^r9dsR4tUv_dvlUM%QQp$@R9mEkHNMO`41~WA+oClz?*jL1CXB?OyY&Q6B zx;$W<%+fy8B%iXD z`Zz5%;6ti9)0dFs2ZyYQ%51%#$}+)ejqi|jc5dQvZ-H_sMio?XxH{t+gV9~pWwqc> z`?87ffOg?gbOF-xbZ+om`WKvrR=hBr<1bs%0-I8ezeR9;e=yU{&5SA-Hqh!PT_r{9 zp!C@SoOo6guz79fQ+UmKY@P(y*bMr0jh#dsrB<98#o*lF)DIsWdkyR3iLdr2?tkrZ z7)v4hEdKOAXZ1dB*(lX4)(`?zleg^LfC*8Gz<)A>&Hcz+5*Csgi_N6bE-OPq!5v@39}fZ_Tzf2!mMhS)SF8lcnvY$B+Fi)^ za9?TC1p*kxK#k9|C-YLwH~!i7qol(dh-c0=$E-5yzbU` zw%vl2?6Nz6*Y3fH%xm!{ucaiQL-`W5qSF31wFvw(g^Y>A>BzN%T~FUciQL8uuWhSS z6Ylb{xsel4pHtOa2Mv2Y^x^7cKvj+{pTlM>Py88uIwYulmk`QRq@3r)_r&e6-W}q& z{Gtt}e_680QBN^8{CAdV+`7b?7jD{+NL#V#|5NXN-MTXL6XO1N-?MMHc*f+^FD6#V zm7{?XT7(f(#CsmDl)wqxZ;`0MJ zh6t^L&-x1e31bs1p1YXcuwjP*uX+4CTcwLDQprhi2kb2UIS?$Ez2ZoZ4O>AJIptlv z*eo_>s8R7M%JjBejfETXP&S%4(FeZtTL%5`B*oqVg?qQrBB#0e;!9x1NjK(zd1J*N zY(*>9)p|RRr&69BP=1)cp7x0s)$S0^Bot5NcS}cJl>KP^4c!80`q@;31_lmNpk4kY z8Td3bg8soqg*bTaeVj7Rax9Es5MQjkU-7cSDTIXjFg4$|gW!Jp{Co{ef=j$tmOO4^ zGnzNNGFHDZU`QZ1UW8MSMPG{C@|{jT9QUP~L|&pN8HI}ri!98A;*6*$6H6r~1xsz$ zh9Zo?p+0XJ$1h8XvuDYSI|H|q=sVPHVl{1Z5=ecvwAd_kHu~>p1_|zs_vh{gkE0G? 
zft?5@Ys^{c=C-2xTdML8oI#-LbeQMfyGM<&kb;A}DCaGkL-;&!cUnd#GFa9)&+1J+ zV>b%-kTePkU?F)g=usH<(Ncj*M0`2Bh2%@QK~b%FUXggAD@%fgmfi+~T6elo0#YpI zevwmFDxy63g?bEEGB91*Vo>#WR^+Kvcq6<_i!SKAM&z*MG)wy5-FR_eqzmA}@cmXT z3zvy?qO{V>9&RHw>sZQ?duz8-aN^P>b2c^#=0OauCC3?R$E@9oZT&^x4Elkp9tqXv zGe$fz;L(=nkpS5OaS!TDkCr?ku9*rnRA0XlqG}r> z4MvCqB0Ma*{@^l%`nV+&bxT`0uu#fd*(nIl)iD1n-fLJV*I7w~9k8DHd~#LswhWH1 zt1o<>m(vI<`SU@_Yj{MD`6(>FSeJX%Qmq<)_mitgM23!n0TF}>L5Wm?h=>3Im-61* zHAxwM(19y;h$xG$t#TozX)OAcjik?QtvA>mKlBAbT8g&ad*2x0Ah=Wnhl%~n3IvjGc%`{$MPZ(Yu0 zYS?K{2)@rD0AcBY*NVy#UHyI+%{BrR2Ic{+ix6Uv2G{x;qFzK}eD04Iv}SI(?*zK% zd~v84(+bjFy_WglJo2;MUdXwlKRw5ItjJ$M;j=!E!_ZEUYGCW7Sa1=K!w5>tMe+XR z`i5wN_h!~xlsiLb*X77_s&Igo?3ql6I1yI$A4ba4y57tru2$?B-VhGG+=4bXY+y2X zV$YsEvSIetl131Sd&j0l5FvBf==3!l{Xmid;ugv1s}Be>Xuu&T$2y7%@o{7O0%}7` zatLIwX86NEoSy{}N{k#J6@~B+i8PD+3elDr?n>cDrEKQbrz8ttQ%(FR z3^5~^_~@IMZk&oGRciw$KtcTS&f)vT$NPfIvui2=EVSlPNn-i}sG^7vS&tJIF++Uv zo#`P3O?-+bMl+s~e%8@1dNbk;hI>3+p-@c){iI4o{Af%itt3|B8EUZyjr*a>K;;<4PQmrGH&-;lcJV@2cSUukr=l})1m17kp( zzZMpA1t^%r2u%PJXNS1@z< zxXo1@cRcN27X{Apmr1JA7f1bT>?jMw*|;e>qA!oiP|Npo-+uEos%zDq4m)IjYR&hm zG1+bsoz@dt?V`mhqik<;p?f8-F!{!6mpDAY;iHSEAozS}K^MH4a>B>s=~F&mvXp;< zx}1N4dYb_QX(YeTV*3I8n4dlpn%!rEc%v*S|8RWRG_1Dpa`CTxS8O5dlIWNOCWQRg z#hbYW?AX9g3`gevs$)he)oQjy0al_>15Og9c;gQ1QwD8-tCNTIskQ} z7Bz)z`Ow6(>e#VCd*_3fuhBpou3&v4U+mJr=QAOM*+TSSan#RBBblWV#51f+`IbTs zUnjW@!TzAz=QxySxBGMZGH~&yXDC#zV&fpaElu3+DPmkH(G?nG@$F{i+tj|fb>W*YeCzsL)w)=F-jqW z(n1V+@BTae%i|9m+~zjyl}~B zmt2ULlH7Go3_^HysoNol6E}k_x`OJGl;M0tl91CklOV;w#L;wpJ%&_X)h=C}OK|-S zZI*liTe!1;4{qay`EgzfrGQ<%6u-%K3iJg&?kQMwyyBc5&RDonRq5Oq10@ci;P6ad z>!e&#AtgHKxcX~Zk{uK@o-`nKaj{z>=k;^>usnRyhQ$)}d+!TFn+c+}5ZIO2e2++k}sY4LoQuBzSC$>uYKGJqEZ1uFe zf4JQg9-`V1Sg=2SA=TG2oMLv3#fu+XqspkZy9Od#PC%!6K4a6^z%ThIVIMD*t=w^^$#x1{gZODUvR)yfQ$R5{_TdZ3 zi?O&N=$Aq@^1+Biu&dA{%}r*)`|hZagl@KW|cI;q?lISNmy114>oAWlhS zwu7LZ{DWxtWl3lP$Av>0A@dXJIwz8W9$R$Z=Slnc=No2_%20B~*s(b!4qA|iI9(v;wH<2ZUgZjyAxv3edQQm`qpS|`M z1xY2vm|2R8f~5R{;35|VXc&YflblN&cvu*?i-J-#aw%xIau%Dm_Q@;?ilw)y;Jd5c zjUf~4iod2jJCQ^$+rAn9_wz~vy#=&AgE;9UVQ%|H!TU5U zKKun&g85&u=fg{R7lNa>%@w1+BNQ?r<}+T!*#h+gOS4OS^S~iH@#wm;3 zqk!YP&Yx67;J6C{;Q4ga_(^F7(Z~lk8`kb*WS8Ziv-ZJ?KiiDZtq}e4m%w6_nwNXB zwh)1`^puUDMRXd^#ZKdzJe!uIvW8HgbGOl@`wivjyms%##qxhNah%jw~5uQ)!N z%_<(g#1L`z2D@TJl>V1mRK$rM*H&@$(e!c4ppa!vIMq1Y#nt%_|e=|W{tC(aQ4h;)(zoGuRd`CSX|{x3$Sa2=A@SY5JhjWRul;_Kj34yyNc3WD+JAkQrv zRGeVq>LI46f2}W2F~nN3C~pqSAnm!jXDr zmfg!kC;}{sSO{89u}jBI=KwZo0aWHy9sd>Th+*O-p;N(W9Qmfj7MK+BL=4h5s4P#iX>j{n3K&z0KW9;h zT!rAM@WtRN4aBg4y?9FAuUvegQKI=Qu>^%qFff`MTsm&rg=zYU0_niWNY=#_NhEr z3d&+JkYqP83GqV4y+UwQmSS*~2I5A( z?SNYkQ31c?IfrkvG;*E&}wBqO~ zPmPnv2^hd*9LG;UN4ru|icxTwFq)oA)NX*<2Qfkty8_O_V~3&XdY+2~@UXfgRealEVUyH0@ivE#;TB`|l`HTtAeW~e7Ym~k zRt`-OP_H4^u38fvD~&?&ed=-aR42ROsN?rSwDn1B@_FchPa!*bXfnMR+E|Lh_pmdJ zTP3AxcV8Fuqr^J}K5|rE9V`kas7~=9m<>Geq|-qdr7bI^p~GmYjw-ne=hp!SPGHYK zD29!JXD2$f70TVS5{z3i+u1=I3!Sj5X2@O)KhYhNUP#Z=a>Sa>wn%nP;V=NC#_()_ zO(2)Ay}~9vw26HQP(~@()4y5_@~;d=cq+=?@Qi|{^t9U+gq!FXPm5}B! 
zE`yKlB+Z&MRs(L~*smu8au8bhbx|gb)-}e}%qN{ZQTOpTY}hETy#EcfsMlhZ&<2a1 zMxbw4>_9s3+b+VW)FWA1!?b zu$JMn>gONi$(O#C56`|p=}Ar@?pP=Ylg>T5O1EC!i<^Sm2-%Rha*ILn3HgJcfulZ! zrPrXnfQdQ_dHHqFMs=in*Up*;+uqk$tqR;Z$`llLXK-FD?MX;z29wU6W%ZwbVS*x4 z5Lo_9M=jZ~P>9-KZL6q=d+ji?fyv+6Dz{}4@n|lSny{^tJIkD}{v#cdTgbA-Kgp6` z*U9xy-yuVX^_MQmiIRiu4J!8?J94P}{Nrmf_uFN-3N=I8_2?yYFP$p=`gD{cC=%x7 zg~$yLJ}e!3w39}4qvT*lo-}|hhU@R2CddD=8DKy!0D~%282~Z?9JTz<3S&U90^s%w zVb)XO#kPl0G&BqYuWcrxQ{jetdYd-wlC#h32L(xEYu(>gqhx}EX(ifw@sCU7(T5(? zq`n52zj+^xSMytdIAj?B`?OHI6H@nY;;IsGo1h#|_q=RU`txTeSnFTbvm&rK^Dc%9 z6XBH0ZjiGbb)5IMM(SR zFlpRo0JaMXapEXL*6&T1-fa_Q=*VPg(WT}PyTn@hY zu=MWI9Qsvz<^HQ5lC~ozNWX3^QGd`*&>F7#vv7o(TZ!76=>nq4g~fgorUjqQTs zO~2mB2A}jYA?|F!0m*dBK#`6(#uYd?Ux?rmAX!Li-vaLia`G9+%SjUvorsKZY6Amv z!L8yr4t%=y?uK7iBk~7vAoAwIL?_E`)4r8->e^lv0JAWd!f>#bmd29WwUXqH?NwCA z{W^pEgaNBka>U4o_abmWkmO1j7zd6z7y{AL2pmU*AHNW+5R@pvFBjLAHia$#SNZ-r zwyPD7fBD6188dPO&KcwQW`H7df6HaOK>D;i`5$g^{b>(>;&`jKr?m8Qy5b2Dp3Zk? zoI~DSxOj+PkUsn>-&Iy|JuoJM=8zYUz7=Ai(GuxTi@uUkCry@HAA3^J}lFI{>5i<+0<`j z6-7GJncB|yGJVW2FikL) zvz&1qCCtTQ1H6t8{^N@zb$Rgk$~2jWYYI`SQ$@kNcVOXtZFrfFXKB zLsutzD8N^khv+Us>dF(@x{o1W`4PhmPMUO{@^_|p&)ZY{VEb7Zop1!R49-)$r)1r# zU!?DlF*4=K+vVyDCkv(ZN)IsZ-JONSTCud>wtZ|r+jMMWJ7Rh71mjb;=hk9q&?R{7g*%X*)B%0Q319OD6MzK;7a#OBRsw+x zU=m0})KHNtL{SfaNQOf}Yzhl-TPiPDQYiyGkul*nN{c#`YZ;VaazPb|TNBSZ{}dT_ z$|!m0(Hw~Yb5k#_4$Q{af|MW@w2}-4Bm|1$?7b2?OM2&yQUCxz07*naRA~9lKr;gH zB+g=?qy_UGFtL6NSh{qDbb=k2mRQ8l*>={CYWj1ODX4^o=j6)4mei`H{Oig~<&L}V zmlt1r26vXXP-HAmk-R>%RqK|re$zUL$oeR%>BLNo<7ijZb0VSfwqkQ4nGOKN<*`Mc z%O%r59Ay}PWC(aOBD3SliCyLC&I4fLMNPD3mejIiF%!d-z8v=xSyxIgo_qd9ISa}^ZQHii zVr?7vN?*}1)}_4864Y{~MO&#We{tmnuR`&)Z+)~$Aa!@Plakupi8f)<|GpDD z%c@XAlqazh#`2`|BQ7oWaY2x>bE8~yRxjy&!r5}q^_NQn7}?=AfL$zj%gR@{=}XhD z9B^z|+rM1>e>L|jOypoj)~y%b2~!)l4_Zh1$TUQ2_9@?37ViS6)2J5c66uu5Q{PFH zKykvtxQz;10);r<7#f9?*v=?`p*Sp1Ol%rGChakj1kyBhMXrSl!tCBY<)$oyY(|Jq z+I+$30!T>7Ak?iF3wUs)G`zV*N@Gk)3pE9z0;gDC=E6VRuF$7`Yp3crV~_#iB8zDX z3PT|Y3C9*py4rCVf5L^XOX)1I%%GgkCRJd{nbs}(QKrDHK}tS}rtD54WBHs#79#!0 zC!H+6dv75h#Ate2CzhG14%9 z=B3|S5Il@45v%c=aa>PL#AINcv|tQ=JR>5)BqOCj7BAf;CyeWe?^=#k3=86~bYG}0 z0hpQ}mM)PL*d~~G+NoNSiXD}K(a*#Gk7>Yw>tTs?F|GuhlkbH7V7B|Bxg^1yt^ zIB;a_gvVl`x44dEe!m7IFDHzC7wktw>hRDASYGCSd1Bi2@>kvQ^1<}`B?+eu=xiSt zI#+%KI|ObeC>+-&ALf1DIp<)qtF0!BSsobywilUo#v{YPZ*{ImLbC)ISPO?TBXtO3 z5f0lZA=IJb6wWl*^T!jLpvaGw0;+fecXwuA_3%Jf^}onGsCtCG-c; zG(W^=5SRg0fd=EU!8Flnod~bOO7q2JSWG>*QMfAamMUpr6t# zY>%ey-7lLq9FXok{8KVSuHKyi$9s4=LP}HFawI;oqZ-r;w2nl4b6u}s^?C-j5Pp!A zEC0~!bTafKV>@8lB(;J;k4&7wg|Q+aEfJQCZSi=*i{haE0cPu@ZVNyc6QPyUHkd<3 zNB8DJqA~F5f^gJ>6~l%QF*ZQ{G)sy+bl@7J8==@P;J^r_MyojRF8VryUqzm72L zrB8dZfR~Znd+$Rsb?W)ha<1!OCJu>SV?LB1D&?p;kd&}hU2$GZA{=%y@*2q%P@Z-w zPyNah*At8!m-feS=ewu))!|2&bkZ%zk=JHEDfc}6u3U%n_X&;SKudN)PiZ&o_wSbU zOeoFiAPz2+qNCMF}w_CIf!fe8~O)eND3%X1*1^_`o?#LQ4*m)YN_n_aI);HtOiit|f6uS+Co*S4)(cKKy8>(kGr503WO^QdJZ zfelYiZU>~VBY*vMPzLmGE}5Czpj2ZJiJYenYVNfGF+-_k3m+v4Y(Up)*{qxqrFhVw z>mi~eqI2<%|9}hPJq{2T6F}Vs{H@9TtH($Oj|w7np(}eY+JOM`UU`BT96YXS}psIbid_AkBQ? 
z1v%@CQ)K{bLYR6U8rPL1;76b6ym-7Iv3Z0@t=cBb?H8Ud|2T-fb%*u(#VRjK1=$7%f`&4Y-Y~5>ij4pw6dq`I!RI_>@F(_fm9)IEqa`u$- zWbF4p$RJqtva4KgkZD^g&K5}n?6aMTN$-LCUy@f|xgGmlyol(4A|sKIiZ-EG^@oJi z!z9TIiioMm*@f^dLwC+7<|1dz5d&>p#^JVt_7?_Mfh(L+1Wk%Tf~g#-$#8HBfdhr_ zD3ex{m>!e67KO>414}=UMu!bn3&y4b1`Fy!xTpcmqm>F12MO|ttrIEfP+a^BCkIZM zFjZo8wNIPp6w?t#wxnmbV8KG!y?d`*d(AZ(fpBxs2Gy~GLC!L$@YT@FeDxba=2>m( zvw%Y^M-7OFz_mh3vDnv4UpbCr&{zICNf0;Se8n>{%wJ0*?Niaph2Qk!B7*H^f6lbd zcZS}mw&Uxd>|1vp*Q()jooV>Zw7J}nHs(mCUBB~2;P z=;Mx)`Sa(==o6>N!}tFKhK^cDe0)2Jj;RYt!9nyr6pOLls8UnBgCZFRdcu2KNH?ex zMLmNMDj0I*jA3sHKZ39s@!d1BA2d zA>A^Li~=bXl><4<@XmNdRGrY_tKXb($vhywMPhT*hyI9<`w++wP{WquHPuhRHw}X* zO!&ic$y~5rVPq1*>cSe?UvliQYvh*|A4`w!-2f*BI7gQUrvn#*cHO#l(tpHQ`Sjg4 zAVKM;;yHgs(;l+@pYdY*JBe|1Rh&3r-~jIU;Yf&yuK5MqrFN}45F^Lp?E7|AGL44S zP!f2?q0%WQX-ews=^5U!LENZ=bMqwFG%^!``|f$NhyJx7=H_-EmCW>JJov@N#$r6d z7!8i_602#jTVo)5;C z+W&QsAK_#r2p{XBV*%wd9{p^;5Jrak%WQaRK7ae!-*}$NsHmSUn+(COUAti5C|)ls z+O&ByjwddbmtJ{W=6(6GoO{lN(g8+?($Z348z2V@hiGgS#wdeG7&ws^;N((FbRAe* zj0Rm{+vcH<(y;I_TO_1cykIE=s0Wf#6B53@wXvG#lloq(bzcouP}O2;d4YUE;4n!+42~je;GR9NC|ehH?(b#*KUB zoA0&*N?brEY0`JVaWZ^(Zy7malng)iSSVmZPojp5Nw(!-!4!}~V~5(6BLP30ob7O) z`+wlU2W8Fbb@I&9)0ODh3C?qUIC)}<27#cce9P9Ya?wAplx?_}=<-X?hl$XR(ikR{ zYuAQeKxi5`|J5K}Aco2*bHWWnSOk!n_Y?5W?{M7m1;PWQC&7sTK_o&O$%2_EIGK|i zkqBx+z5@snOmYW2vcoVQjWpahN(7U@4$6go08bDQ=1ABxVA)}zweU?zhS3T}Vh~~8 zmCp0Yc_6YWvS8sKa?#Z5)lB=$~QjaUyHgj(m>wggDkpFe%RWf??X!+Y&lTb#< z3d=ps(eIkk0tT!6V*orsOlx(-8wuTtYrU7t{JC4?t2wi=t+q?LbZ!e1yW!Yc%a(n6 z4oGci*VnoO=ZB)emIJkgRzu?o9-G*6YQ~bPq!h?%;(@n&;z*W zi;P4J{8}ZomJC?9+p%L8P(MUodwaP^7JhJTVW*>4&-PGm&60WZ7pWirbn7-qnl*0> z1|mZm)vF6eAPOdMGb9?5Pg3jFl92w>+A8!K_7pZzfWFq$9&VXE`QAQ9e|5`XY$Qu@U=RjIowlWC~8q`;ML-WN{@FVT2 z9aT%>8#Tg&PC7)B@3|A6H)|?+n~&5CapX z4IANdBpnQe65phuv`A>8cE%}=<3QvpD+fP5vv^}6MksX)vLN2)YHKev{4=2#n2Q?? zb6~wJG@>>J2PFX*)CaRL7|DU>A_4_JRQ4aN{M?t!rx#>|Dvr$Wok-jKm!t z#9yrbXkM3=`M7E)6PyDpo%=!9X}6!^VNOJuuv5Qd+ir}D2;5r&e8tK>1CoG^>vl>5 zT({e}MhHP??M>cGIWUG;}J(xtY!I;oO9gnbW;mx9j z7$0ENQ&SGew(Wax0a+%-agnT8o`NRKm%6P&ByH1bykkobZv4p;(_z&xp#c`XRyYR3mMz=$3S%9#7*Moc771=X4XgZJJ9jE)KYaLL z>D9Ztv~AOlG7crA%#+s8oX**P^S}L8uD$wtnLT?J_8Q~;YCmUQ-riXTnRN_JZ&Cgy z{LNMm2qAE_rZ~$~Z`-k5PCD&e8Q7zlO#R1r*raHQE5&PTkCE<^MjJ^Pwg@m`a9kN? 
z(ex^r3*t6cgm#aaOm99`nsnR>;GQ!l0GbQu4pw9&x9PZtjrO2IFPz0kVu})WomeoL zARVP`1|S&!qT~!^aZ1vCW-{Eln6wHaF{wpDr#A+YIxrA4HkQ6ccJ5d&ix&PSciuBy z1`Qe{)27`k!-gHJWw2a33Gw|%bME8MwlN#ev@zNheP<<)(Cg~hR*`AB0tZP0p`%$&+)(Dzkqo&sb)m>*&dB0)FUqE9ESd}im;W0@3dlBnuGFb2GmnM_G7h;d0H&A@O?9!d-Gr`VA|^25MlL}Ml{_jE}) znZL$R-3StDGf3PgBr-PSxPSin7dZ#}i?6*lQ+oBP9Ia^u=m`JA$&r0p+Ho~R>u+EL z(cU$|5mq|F`^u}Y%K7J=Cm(+BteiLw{SVwGQ)?z{#ximFMyJ-}z-$^kbctvcpG{1* zfWddZadqm7gD+za*Q`1XUNS!fpbS2l!dV)!MP3Xyj%oCtYlV;LOv80Wizo)ix%rJ% zB{C{1g1K4z)(DtlD-B*lsvI#Xp;4tha~ zV~%>*Zx+P5Wo4yfTO$gzw))3hP?&<6fHXnx#Y)Q14(yba6zqv#Ej@eokgKn{N+)`9 zz-Cobx^Q5 z6TuL4lX`Wsv3&)J5++@<-|iX>z#Eoj9kEEy?5md@GsHQLlS5*ac$isQCw+sP#Su*c zo3L1<&r>=m*YTGTEgXc8VPp(c>s=WJhEwC6r*)v)1LnY(-Yt^YdI_*2kSYJT=vtY4 z)_HOk^k$g9Ye+EbqwWZ40d99>fVRcpz8s#`a;NoGT}{8)5=CwdB67a&uA)TQJ-;LQ zn-dTBr5a%G{KlJZlx|%*LpiCNj2$yt36c^O4W=e$dYae@WgBI2DkR_$<&I4I+^?@$A!tQJ=&}7iP`+ z6sI;i$g$AawayvW`jz)t0%v(sR$YAJjNPX!t5nvftafn^=fq2A+q$eW0tt67(_0?I z4hO~*M_YrfuhnbTt2n(yix^4UzZz4mK|!O0%#`@q37h$7M);%NrjGoeH~-qcw`j|6 zTfW@|Fwz5q+ssooO2~r%>R< z7R14{4brfFtc)DqM|F5uju{48A!Jg0uB6Q>1`G4?7bmK(wv}Hp@q;H;41(aH*i-rC zdMm+DTW_Hbe|BDV1M2{%+^=S+mn5@@UE))9ZkT*E?DRy$Y#&X z9U2`#>;cC(^i&5E%y=d)MJKg#wBcDelTwWmZuy1cSZ^3D z^g(japkXX-5KLFNnA+CMm9knMJi5oRSy2}Vo>sKXnwXyLKwT?uQsSzH*>9$xssRgj zd6*6l$+9{Tl4WxuqB~*SICcQhx@2b=KjK#w_E*92exOpmaoM zTi8$~+91GoWjq^-nK#~at4x3VFCpB#}D`XD&w2=*K7G zKltHKzBUfq+MAZNjic}q=Hl7RhLruZY22AL>+j4#>*^Y>>=%nRv_sH{b^yXLN#-9! zdq4pYcw!g=Ov2ipoI3Givns570A?@-JU$i<@hnH4=88BPmxbe<)zb2q7K(>FoJ;u} z5+g0bkRI!M^(PY$0)HIjF>zD`fQgbOh_=8wNt$QNh zLf#aLdgZ}B!j5e_AT@x!eZo{D91V~e^XHWZ4jH&$R@mx_R360T$=MUmmnVH?$1TMa zR1{zidiQ}}*_Ck5Ux|QY6!E*dVRDC&gn)2SL5i37y!p=C*g`l?zL-5n2?gUj`@%UP zxaU>VzTMlO@Huq`yur)WoM9jg)0C2$Dlh!!CAs~ux;0}7&1=QF4`uQ$;IRu9FTO+NnY=^R144XU$3+$7Xo(qNc<@ zSZ{0(v;i9h&OW{6Zd%$~sPShW=0uIj(|7F353T+_^j*hNOhKRk+s-rRelxXH>*pJU z(xHK-vpHD;WWSjo&$N0%po*4d%LwF2I7uA2f|+#s8S>fe*)slw;~_Elx_IzcCwzg_ ziD}&1-|YADY_aEwnsqcvmOL*UiZkCC^4cn_} z10sKAQpg94A-aGg1VGMfDeO+9MQ4j_Ja2x`*}?(lQF9 zLvnldvu&jxN9-T87TU@#BFY@=7CC&h2lESKTWcrzhX;7KK7dgh{xy*0kOX38SJtS&Asd|!lsM?p2x#F-|KI_DaS+UaL$~sVXbeJ zW?;k1&h7AuVJ8BeAOLy>jLq=K;Zsk~ki98;V2j{tJ*&+-*6rA@>i!~(?q%9Z%iX^w zwZ4IyOqL5OPR{&r)yKl_#4TG_hVk7hHPIw_?&L*1_})`R^NV$>*~d$`g;? 
zDZ`*F+7{OP7-pTBY)wxqe1x5!gk2wn5(}ePG>uK8HAVSMizfp)>1sTTay8E7O2$CU z=QP3dcD%ZPwr09STnB4#=o>*DmzS)PW{XdRE+6TH#R=m79IFRtrF+{oII zDnh3uAsdVZ@wW(*4o&9rZjW5r?Lc{SoZJp=6H=i=*J5N`OeG96W8w=e;~hXg5xDy# zPV-{_*?uz~N9tdTv}M=N|Mu`d&$3HBvSS2|DX`7te<#@#{yzpjMXVBgWR!OnN**Y16Nmw0+CvsTM8PPPO_#D~r~L8PdTHOG zqcm;YK=$q4E=zy>QPytUiaRx$NsnWWk&f+K>ttvoIJ{GUd-j~*c?+kR!#s7bvu)UK z>|>Uj(5$K4H|=g%AACpdxpSIayltc0e*0a(@8e-IxD9TKt5uQOR{yeYm z4PIM+Z%kk zI#|-M_nx|Aot!%?QT}z`v$6*_xXu3fHR+PvMrMDr1gm1wMNV6EWDK|h=6*Q(5UXS| zs=j`<4NU)uB)4xbH{D1}l%FbIulW5}88EOv@ca~c=GkZEn{O7#nzid-1~>&c&r68N z3qS$VW#B3smOAsXGB1)S{3w6tHQqWzAo5)Ajet z6}LSnJ9qDq3(mSm`b~XMmj1p;B4GcX`wdIKUna@j&y*Q&&C{){*PnhyI(0ZnzWL!# z3CHC%h1uycYV;*?_1*uK-3MUOyH2!hz_v&iqF zmngsmEpu2f48^y$3u1-9yfQoiCMrNtHw}hGCO}$NOA?#$ZWCju%vZm=Dp{dNW#>=e z0x2VqG!<$H1+>CBRDA~Jq$Az{3`kO-+^rdb#rcv@u=4`#K!4JiiN<%$Y2wtE zBlTQpNla(?&i9fSmH%cL1J-aXwi3QsxKM@<86ulDZj_5JxmYjPAQ7fBB29hu>rX}M zRNl1i@hvmpgnI+(^!8hC%S|^uAotvJt^EGS54hB(v(`oJnWGWd9&|d%dEzoOv}XQs zZ-Zb$=__xBsWv{wL9f0;=-$wS66A-s+Bay3O&9_yk17rJh8d#-MbGH zF87B41NZN-$FFFCKlTc5(>5-VJ(&Z zSH5`X4jFgmT{3Ib06AyEG4kdMuZkoNmIrP+OB%&R;~oZVE1<5xUi`?k5DqXb-ldCX?VLDm9Xypu#N~fqaXh87el{IpqO;#N z+}D$7_+uK|t|aVVef_mubp4%j`IL#M-;dZ^ZmptYWq4f7fGvmZKxe;wl~)yEwm!is zE7f=_wy7lAbqHFJwArm&iV&I{swAmG_{`>=XW^BQ|7q|ruj5V2Tp4ZF6K029Mu4*Zz z$if)gw|B2>$5q1z_NQSV-b`#eDr7*jVq#h=a~Z-@g5T15r4BmnQFj@|A4e zuus~wYl`qty?m%|pI-9kpR1sVdlDwe0$g>=y?ihT*bbnQ9xAOrgQa*yo#@3twnr%P zQS;4xhRbRH>(dzbJ@}i zw-!!5{|uQn{XF?&^=53XB+Bq{qviIif0xVee+g+;$h}X0C7;fJQ<9n^O=cRT1H46) zRz6OgA-!M z4#_g(*{7g@__p-u+Es4{Jn6&}Bq1R|lQ}0%o6eb^BCcCqo#Fnzd&=8AE&HC%KK0H7 z5ISxX{IPPSEd6Pj%=+{TiNnoxv)_6VCklEOi$=*1kRfs}$Nk&A%&PtlHax-3kL|%e zXM09O)s=n&2FZ1IPLxgeUMuxGw2&nW=gP3tr%I>f*4lK(opP$kLr+WB4kP5sYtE8( z$;r~c-vH^;tDBNMi(iaPX8Hqc@7!_wt(w;JAD2>`9FVS^2TF^!@zSnMTil+RjtTz& zF2l-H*mmyOBddO0EyG6*#yuRoPag1Mf{(>@#{>FyldP^;lCnQVw&GU7)k}VoWlO%( z;x2{6Oo}pbVju@L0wVPUK@qm1LXnm$Z}9ob9H@E$Ct1=YFf(kEJly#)sdo!7Bhk1) zF%y@E<$*y7m$n^}WNqP(vM(h~c0-4t-5FTzLqd>+Q!u&wTPgkYH z2q-OHbj7){al@T5?cSHbxS_G5reQqVX$ZuXg=02cX+q_AWC~HAN^lQ{Wj+ zgX|#!Vlcf;ti=(6{9-46C0Guo%EC^6G6OHZ_>z3|$;YZxcEi7~k{&&J=>6ifa%f?; zk_2}UZC>vFHRYRitZ^`EUzir^(5b6T9EvT3Kh{XMHjU(!%Ws!qkIs}<2@MgUNO}z% zCA+q5k{`bRUcQ+>Pp-J+LQQz(Z4b(=*ZdQr@K}s#j&IRhI4`4@xWwY1s@t7fq0N|V4~-BMI~0}Oj+6K z2UrAl1k9wZ$01#rSy0SzM$Vne(L?r%{h~hbL;6Qo*y>~IeekwQIbQ|_w z-<5NwOwmh~8sVlzYmtue>6q5})DPDmekjrrX!dgZ)9)o@5ZB}-6PaKnnt390=**0+c z)r5D#Re;I4NsA=uKk5egVD?<;o)9Kqg>;bX2K3bhbxu}>tX;cK;#;(llP6A+@u!?F zcisD-EcohU8Gpw4a`O0bGO}McREN{AS>ZHo+EncXm8^@zZQkasay zig9BnTuh`YK|qaZUw-wbRGk0Mmv~4dvY=>5hG56mUD6HLBR8sFNA|`>%GV!nlB8t344PR!&758%WCDt&3aSTI0P`b|%h|2+M&tWK+gqqCFb-oP`pcJ?Xdiv_5&8D7O>*tz zi8A4o3DOl?3{L3*)2PuCw1_26#f<|3WY{@UqhjAfUWmvn-yNZu7x$C7HNdNvmn>Z> z&(4@B>(_1uHzNo%>Feq)>R}5b&6FFDbT}*d)XeM1nlL^n{{%2 zvvugF^)JUN&9<{MuYdszFrl_DR^kFp_hW|+k_Sgkkh`{gFK7OJlyvLdUUMzX&5-H$ zTp{`GCde~)T_us2>|*Q1NpdGL0&twsT+Y5(hSU1{dU~qAZO=dc_+6fR?s+-&)YGJQ zuU?o?I{|M?AHB2X@iao}zWu;|Dm$W7FadhaEU%iy{Hw}@WB?Q(ATvc3a9W<8R1m2Z z9VI_~K1&uX9w*IC9xQQ~;P>s^A#co_CY{<(mexrLlCdpOWbXo5yyQ>lVH~67<>q9` zck>q`x9-xeJs4Xk1zJlIw< zK7Qq8S}Q>FwYok>q=FyLOU%`Q>u@#EJ6riWTzRcMIjrDHqBKqlU<&NoT?| zY6pn|5sZnBb{TF8yfH|SX=RWMKn6sbJ9cc7_3PKm4iMC>+qTJ)Whm3#ck`nGU% z?9x-tm@+|T{P$yd>eYWqQ)tH*fT4@3*F;XAe6frtx8^ zu92?oTe(29lN)LP1~C|}Q-@CS@=GsC(l;#t*UZe}Bq;W-WK$@r0~g@t5@N)>1xr>1BCib$6L}-zicX z%82c|bd>4${aq$cxJVv&{3huFWycjiESFpFeMSEB=Ko38c8Rib*-tX|)SKm$bEnAh zBYFYPqvRjg-5@iEUnJB1FG(J_@o!Q$3ep5xJXFX^@LvI-uM1c`fq*z>jIa$%41VXU zZ3vf#P3ZAS$)L7?$Iw7LEqtER5;kAqp3(%v&*DOLlpPHC50^Y3WwU5_xPV;waB>Aq z;ukCeSQ!x86_kZB2MSS~sK1!~gvR@I4&wjify@JFX8I!2Ty>6HG{&M}IF26PCKp^iO;)Vg 
zD%~-$(HKz}MDY1ok6;foMBEJ zVavGV3B!rwJ9qAs3#R@{ZolPJ>D#{(j{Id}b&Uy=5(7v>bm}agtT;gehs{G7f^aAz z{`CD)*_M(mEn(I0%a6X4Rhth;$FA+=xG{s}nC@*Ak#86ND*N_l zNJ8`GGIDf(NL+eK9WV~x&Hn+mA9l*%5rd^;n|O>ND3GD-Mc*xvU;f%DCr=n5Ns0B9 ziRNT|C~O3ca1%H>hMISJ^PYq#oYdI1ZMWQe@7r?y^|wm9c5RB4)HI3PAL`~{nnrt6 z4SqTM&kdBLpGQCo9MMdH<#*f(ji-KNTuXpBFe%RSC!MFfs>+Wr+aC!h;pHUH{ru(2 zf0F&F`>~qe1JV9ZGHc;dk?bPr0A;hfbz&v1P7H`oq#QVS5K~qdj$UQTJ{(KilZ6Qk z_3wZQp%(TwW9!Ap@M8x_XI%5#DBh%{1c2KZ-0w8D=uM3|0nu4{M|f&_8yd)u#J?qz zjbMNJ&Ks|iHR%oIm8Ygb{7t+u{lv+HQ&MIIwCceevh!kN>jM9|0yh(KWwegPTTeK` zFgk`=7bn1Ye$C3_HFy*wmV;un{K!r zTPq!vx$~}nV@fdfb(RpEcV~YDC�?)Z`X$Y#$JTdHWbeZ^01}2D{=!&hkt${={XT zmhjk#HLQv_Zuj;Hq<*X;)7c*hQ)?K0;g1A{Qh+QdY3<#&PniS~7S@;N$TKhzQ2C0d z8*1aIrrr-h;)Gu$G{$KphFaw`v+Bu!ifubS>1_MC*DILcwk&`8HkjoePIU|*?2Tq-0+$J8mVqUl$&zg_GZ#%v%~sTp@ls4{DM=!{_{#)PFsuW{tR9phGjVj14$bAm6}Og;IDrRw>ZibS3p}O8 z>%en3n$lYf=(s~5``W=2=!l@Vw{%tJZ&sXUU^?NcEaWr;7(`G9Y5A%dOhgT|eoSZ6 zYF%8D0tr+Yj5O7%RZmy;<*V7DvB^kSr{s=h@}Hw@3xkb!q&}T(Todce)h;a$rXAQL zPd$2zJpSxEGVO`yWk|0M+9ihZH@!pn$saHcCpS9dS3L^@+n&YaN@q!tRNPlkRG2Gq z4ZyqrPS(LpxPTL=sW_=hd#+08B#Wbq+zz9|R*9K_th6m)*e?O?_{dpOXC&FUdlz3CRHr3^0+t`K15T|d)c2yxPDMssBo0BJx z?-6EhcbWN|G=uANO#eDNlh7d!{_zA3=W-qEO6RZ$;H1siDrsO+=4bpu@J`drcEV=6 zS#iI{VK*Z#nF1}7?;6?Up+wd|3Z{kexHUuAN*Y<25Y5K;@iHV9NLvh)Xyjz3V=Hhc zVz4NazkyG^4$D7>JNz}N(G4R{iC)F$N?j1UQ1cL`pr%yVAWp?AYU-n`J6=l$V@;`N zWy0Y;_}stCQ?_L+%9Ex_;*y4P5>P^3o)SsXm8#N9ue64 zZrQ4JaecQDA{P$_cl`VFceTZC-`S`3s>uKUr8c6j536M2y; zLl7&79E^wfoQ#1Z;VKE_WllL-Oi>6fCZgdZ9^KRloNNMeRlKqpGpfNA+phek54Z#X zk2m}%`q%aatHEtpb;L`Run|(`N5^;LLPB8hN%LZSEmZTMA*Lc+nH&-lrT0HDHuLt5 zb0)va_#14T(LQG@yPCm#qgAX2D#0xw_Jh-}GU|4?&CxI@goSNo`_bRjl^w4vcr0F07vMn30qNE~8Ttg!BRRHt`XhTA!8Z1{~U>{-2BQy@X9gXqibmmscEW21)T} zCBD!So2h0n%cLmLxzApS6{z&A-3O(&c49mw!j%q@=K{2R7x~q9z!R8bIx>lYrZ1~h z|MF!!du!{mO7Jh7OcQT64DL$8zp^VHi+#-Yp)zbE*uBWf#grad2U`GPuH$~F7wd=d z9RkK6B*bOd2x)Wy4~D@t^c5Io079d1TA>mAao++(`YP7vq>Wb`O9+7kf=q(#7or$i z5qWdMHWCMZTihXS(QoZ?z>BSj0@x>@?F442eatuJ<3x)PC_NV99*Dwh)ghpy!75D_ zKw&W|st!yXH^!w>`;bd%XW=UyQnnB4;?tE8>`Yq`IGB$oShS!1XL*9^uXwdF1!jfsjwHs?S+8KcQxfEK zJ$rSRXP){@KK}R@C0~nFB|qWu?V?up|0&|)NdF4X_!TgvnZJHEld0-~M zG*H1X9J&bjZx|CW31k#F=~J&!C>1#nTxOP;LY~H$BKAW=; z%8oo4VO&KgV4F`_&+?3MJ08o*)BkX`m1{f2|3rtxL=MPUnWUHBq7GMh?Q78l)%qthC9rD2h`~?F@H{A57v})R1`uFdLFoRc4+;YBagc|UN zb>e@2Ve>r3*iRLc;wfEblUB0x41eH-8o2!fT;m2_l+TntjTbowrt{rrjtDejFkB>p zV?e4D!~sxFrodYn%sUTPp7Q*>H=^n-tv7$$x%TNj9#;bh)j{kT`OC_~=ItwO_x8n_aYda zD$f50cl6%7^s0a;SP`+JvA1B09b?p(*kX^y5)%uS89D-Fv%lci-;b;SKD)H#1+KZ|a*jub`Al`D>1)Wu+`@70cC?x_0eM zJ$rQF*9+V7AxW*MB@Yx@wrZ`b-X}Uz4#EmuOK(*VD_n>Ehjke z)n&(ZH=U#YOxZPEd{9<6f*XftNtrGGeg(b$+8gxDbFa|Sh4X3n=>9Zx@F3b_Kp%eF zqQ5?Pt0k}Bw{6#&JMu~@E-Ivwk`n&lQfoenyAOX8qyx|Do6)YqV%oVgpNjZY&)i%- ztShGt4-!b9>{wh>!Z_trT*}M$oR-I5{@k#mkWT__OXd8<#AV;}K}yZ|B*p6QsaM~j zeB$RQ>e;h5cZj41CjI;MrNW{jTD*8A^Dm&HqI_!4-yzB60Sg{K6ciQn0bzHv;A$`Hq*8p`P7=fFVL-P7k%V+F2m&H@(!pO^ae7ecF;PHXDZr!V#8!A5Id2|$@+@Y0%B z1FCppgl{z9-GRv5oC- zP4}r=AYPp`bsZ~$X0<7Gjt!4O#s&p2HY%8m_ER-QL#r_HoGDd({opq=oA{AAQ5fhb zdImLn5_*1RNfh2M-E6=?7lfc}>8693T?IF@LOTL^ZJDe1HN_hKmO)W*kzTsTr~xYs zXm8r_am8)gwBh!rmOJNN`iq|!6yn<*=+yE34!||%AC3jb00fUGb2$$CARgQx(#=AV zH1N}9!sgO@7(!Xl8NnVLg|eD10a_qleL!_da6vFY=`uqqr9lA=ony&5zvRiD26+S$}Zr6#U50SYiCy$%eD< zXNuMLNXIQ5hs{g!P5r?%#lT0W+ZgWmvFcx}Ja=j)H3&b~wy>tHI?lGyw5{}5ax}!e zdU}P*&DmDC$qronf^M7Lc4V`!UM2kJ6vT+Gf^6T;W0B5etvNP zm#BR+VYhhlC!?pPseM1PvJj6?L&%4&I2=j*Cu!ZYs7x|4qutv*2L#v`((rvSr9nX= zjYJ}_S@hwIv4%ZRVdNJaV>q3=AX*A zP>@c+QlTAOL8l@`1Q+tSg=8v>8=dGhj7H5)<>>TC4Dxu%6oJa_T{7~e$DWDV!EQXE z1LAtc$7hFR{zjVTTr>9-?zbD;@lzd$;4kn*to 
z4EMF6k4mM3rx#5>Web=f1_jVtboY|SPdbakZK^k<;nO$N(j{5>z3rJSMQ=QNAm^8_jRVR~JlF$i3D2J5RZtf-gtuH>Ky*^Pd@nq=bm(W7 zxqT0K&CA7ooiaG%VP5W4CYuR#GCBovl7jA#L8(m(Aq^-Ig-N1llZp7ZLY>&*8X2S0IUFPxxw zdC~Vu*Y>N|S;J<@Z9Z|Q=fypCdceuF#*vemv?&R-2z6_{Id2s&3&y zfiFT%-{80&(QzTw;tOStMiK>B%by2jpUX(ll{-_+QqV~31aq0qDJc+35>;dXIzAGU zlhZOuNfG@N|B;QX+%8*)GJjRnRzQ%%K2@j72q zC;bifZ2Q?V+4nXK?x9N}OXETdSEn7Ew4HH{<7kteFcNp##Tgg4bY1=O!&spaH1RRb zBxM>CD1}VFw)A2V$esezvEe;nY>0`B0C-%-wE)j7qtm^FX{_leWGM(Ka9OdyQdlwp zyWl}9UB|0JKFBO`CnICfIM5eAG@4BzGZxVP8e^~qKLJd#ew?Zn;GL|GYTa0XMDSDL z=hRhbB_oHER#Uvm0-3TvAcF#1Sg8nhn)BWdj1#A!$1#e>tWREP)5np9(vp!~1{LY* zAe$PhWJ)DA&hF^f@I(C4H8_oJQIjjLisE|&iFELOXlEKme59%VW!myh{QO!V@F|&} zO6v#4uL4duscg>lbjM{G7=r0ezZipU(C%u zHT(!osmAe+va>|shv^s)h@Ir>48z$fsPg2te9CS^oLPGxX_jfi+x7@u`b zHK5WZ(aSARk%ohFD) zTg!pRZjdALm{cR7;}0d(%X3hbdgDm;jG}&}YsJu$sg;qaycTLgw++_+64Ija1IHZ{ zWXd5@F=a!QF`IG)$s4o~oSm5Pz)S{t*=)EH^&gQvQM{nx^`H^GNvK1KC0DGxpyf&z zy~d(LQgWi2IK~Qz+A%4<2{7?X-!EJOteZd^i3Q>g3L2swa9I4azc=1c<%C?$&NQ2{ zHdS_G{Y31L2&}VgRAJ{J;p` zOk8bwBKPhC0?6an7J&1DjqvfWp^Pnni?^)!kAGwrxns$f5Md8s$k~wUCWcZCKQsmz zgSBExg913Qc^$DPSgGPr17F$97YDR)W^?<8^B=q#FuzKrf}aYmU-^D0aGA1YG96n( zECbj}Y=&zxkEL`+oq%#SPyc9UprZE}I9|KZ zCy%jIe9~Eiz~*4ZXYDiuDT68}KgLXHP@oy`rkhybDfd!F9LOu*kMNm>$&{>^J!b0B z2y_ZkFErrEG~($lORZ->O_2RL+jPsF2z|Fs#ZPDF2PP3Y(hVD=zdB`2o_6#~hL=s< z%E&20I^xhv9#7cxgOW$<>+yKU=4am<96P09-J4ZU3|pehHXBl=Qcj8bbjo`Qz%Uts zlf}zMf%xT#fet$K#uTKXNTv)MgP!7Nx?*tA1W`UhGg1AM#Y>b=D)5cbhf~qFJ}%yK zKt0BM$zNSg#b(+kz$Xr?U^eqMj1I28&Hbh5I*d}`)uYS=E?M2b#h*JjhPv(7W+@K} z{4z=>94ZWg+ara#x>}yO!inM+tR~{}C-cteB1BV?ZwsKj@N0-G7QU6p5@0GFT$V%* zn5~?v@4vn9vYDTb!Vion=nJ-S_xqx1)~*$m75ZF`Tz&=It6Z}tT!t9O^?W#DDgpS3 z(h!_F{Y{1VhrVFs=skJ_|5P+ExFS=1N`^S{WpGj-rYR+4MZTn381eB@77b}bRVt+q zT{0QmR#Mz#K!fQwxZ(T7%{&~M#JJ63lvr*y1Aup%6hZd`BhY?ed7E#-?XA$?-L8K8 zPLAk@?r6UYckQB@>d+oJ?c)CZe0Q;5+E)rtF!&`jYXc4@_b3m(Z`MH0!iOHtipgIb z=as)4eZWvlq2tYRItY12VZ zBrutf73YTFM?u*?5j&@SP!N<-4VMlEJ$mfIBAUTZyaGTzehCef90mYBLgR5M$>^F4PEba|tP1|yko+&(b;7GWvMpCoW!})}1YuWv-4`p;Epl5oH1)D3 zPt%|v0MqMNS1=%Gmike|0f^H?BFh#CqOr=d3NhJ3KfHl~8C5C++n7vTGQk%!xl9a3 zz{_jEBWqRF)m1Eu`PNCYGxXivS6qs*DXD>5AhCyM4P2RQiHF7`(N_qiG02w84UsQw zvOCxX{@~T2Bw}|zw(tX!2+#VkyjKPXQgPWxfNuC=p*>(09{CXyD-$21PK5l(XxSy+seWpdE{@(D%C9xVXnJu>k!bH-<3&16GB+c-eA@ z?cz~ZEH`NSg^x!sRz7%SikFuhuy$e=_o5VBdMw*FtrP@j=O#)4TL6wLjc&lC!wpip z=%*?#8WGoyCRM^I;Cq#aGu;_afo0OD$;yxno)?aEq!>IoVVrS4ibil`t-=ph1V(pE zyj0HBibpeio(K=_`~>htaMqQGJcdd`^jRZW=&xlqr|eO10Y<>b@PhsP%)ucfetuvB z6x1S>JeXwA&n3kn*dB-Pi>)JFC)Q(cz)|Chsy|uQX z{M~~O{y9UuvUS4&jktIryKLLMmOlOLGb$;o^t1~G17<*AzG+aDznwn+^b^{aR}jJh zN5~$LKbGAfo8)g;vy#4=yMQXIszJn8fKOqgFCKYdK;XAdfv?jpuph=08`rL+Z{{te z^2%yGh!y$V1d_bMylwQ^oUdqSVX+32bT^!2(}ZS8K^}ea#h0`_uMmzJwjcC~wE~hV zU%>|-_<+9w0kC8>Nho+-xdi+cpce^`^zfh{T?{<P5E%D&NE42~ zfr`tj7%sA83z#rn=mEJn4Z-MS{5uLJl&^t8a9U5BcPJA&;-6bEdKr6(%mN1-*hXw? z+fnoqSX>QkEk8CeuShSO_aPlJc{1e}m20pFPRN7!e3Z{Wc#GcsU^Z1&n`QnjtG}az z51B&Wt=_D0YHF&PKOWCS+Bfk_BfC|$45QODXdV6cg(qnLiF;E~vEeWJ0Y>gMlh7Z? 
zkFYC$8@=+vELye_gS=1~aJfY65dEmYwhQPQJs}GtvySh-_ykSbcRY9S*u`&N>jR9I zHRP+BHs={!qLmB3q{#>FPb=0KJJfQy!rwHG_%nRP;Rj4X%LzH~NDKK=hMKwcM{qZ- zTucWYbO6m=yh`WR)G{xNwx-4OkS0OoF!ZDSS72}jj*W*j*i_PWQE}b)?EQD>t#@aK z%NgmQExd7!(7^yAN7Dg4@4y}mW@!k`t63nHFl`LHWR^m+^SzbyuRM(FM%(6-jeB-q zE_hU`i1YcC!`m)?DFhGaCQ~YQcj7wZ!D|qqC>!RIo(1Dj9 zAuE2U5%6{7Nt&7cMLHO*kA)+8N{0`cqO%tr%y14n_z-H}wv`qEdkKUN8fo2jzjt z_iwk;ORv8}IV?9q4%=?pfn%u!mmh-c%gAA!qxnH+UO=|XmiZW40kb(Bf7WF*cm87P z*r5#)!ghhxL|08j=NezZZE0aXop$EAv~EkD&VfDf$~SnSJoPF9?pXk*e}OKF4zl4& z+<6^+=4JHlf+f_gQ(H|hmvt3?ZyxehO*@Q4G<^I4^wrn%Xw;B?1_%37G(k&eEf07> zBMw@gkRO0Z%Ib zk++k2_U=Qqnl1hsT==+sxNbw15Lx&Ehj_up%MhbNr4O11r zhrM#uDk|7jLLEAFron>;Q-^j22KpKq>uBw&?`gxPE!3iQI~p-^6m{*?-k5`F6!6k( z8$N#nVd1ymP+4Ui_36I{4ea0Bz;Nr)v+n@vQmeS5%b~iOD%!R^kGl5gO|6>O(09w0 zQAv3f_3k%-2K4EvaZqQZVXb5tSF?=84AYlk`uFVx)pet)J1oB6ws`}sTCp;k0t z;2`P`I+}*)5E6wi(wWurOnlv%RkU?SKDBDoo(2sXOx?P4)O?|L@vZ_YtE!`3J-Si; z_ARt#-9~E3a6^X;rFN}b@`n$(@n$%jh+1BiZ0+VH?kGipXX*dID~=U3@MgOtLZc~q@(s7JRhwC4No zsDfJ4h@pchry1;w&NhejsipkB)9jziK%Z*W}Qs z;X`%D3;$ZVdJUEGW0KBYyR#gFsdY;}?GKY1ZqK^)>O&p6)>6w>Etq}{ZQr(?I&|ql zZCf{|6}jULs8%ft+5cJku{YSB*n+@^J_Y16iR>e#s} z`|}_@fI@ki&c7M$+PQ<)ZP-Mu+jXMh!v<3|Ki=9^TFK?w1*X@!a*>5Y+x8u4)W~7f zs>(Ps-s+1NlSwfX{ z&1u+(k<`6&2QDGhE~0|`JgQ=U=-#~>+ZieRNP>MokNvG}hfeGxZKw#kwQ5hJdbOs_ zTX#^G&K;>u>sHL8lI`;y^g+&b8D7A-UJ;h4Ar_k&6zLY8*pGDvJ$wURc&2u zR(RV?AqRe`1YZ6Lp@UYn4|=uy<>#7IST^BW7Lf9jtCf|}TP}ScW$#a<+FIiu8FZt_z!ISTsU2g*>jxYj!cajIYOOy`nl?hZj{%ZJ$k!skGLF(vJ z&Q-I&UZi14wr^C&95h*7d&^zw-rKKLeftkqs=f4)~;e*N95 zprlF_Zd$1>Id{5xtb zd+#$*jUPW=P1t9Wn)s;I0|)2^+qQ2%}TZ)(!W?rOy7(Q3@7k!sILQ`A3Sdr#Fc zojRstmpkGJ`P(+CJ8!yL4IVa9jUP8wjTtjWO+Ml{_3Hm-tEy0Sef`-#swvaXQnQ}@ zn>zZ4!_t}guXX{vwU?&|P^Cn#!vq*}UiquRWDt~zK0sfQo^qdNbz zX|>!R6~c4RFfv|tM;C-w>t9pv(=nXxhZaA{`34_ z)#NjNq4ElfHQp2VUau~{;!5@W-=9T~rEY%cuWDCWmCoZw9{RfAi;s2Qfd}l*e8;GB zulb#-uBuf3e)2wb#Wgpme?0e;I(#Upvu>KH$||eX;;%kdM@>0c?Y;MSHG1?IF86V2 z`o&kNrK>h-di>Z(z4gMA>V)&IQCsqgRQ0ay>Zcc;r|x?2&+4IjZdQBlHBRj_VP7?% zZ(sF~SKm?9D0mJ_zn-mV;9+X+;*}h$Q}^9*vpVk=x2tC!e^l{f1ulnuRR7MT?s@nL zRa9E8aRB%E2XCrF_Zq1VIQU?-7u)Xc`yWyF-*vsZ?DE@KUg(HE#qbdSTD#;MHDbS` z)kmMt*L1%8;6Li{ag)^FXU$S)o^h%g$G)-WsA1~3GiRs;%T{ZcjjI={(|$Z%-S_C9 z)J;FXSnV-%gxYJb(Q2QAj#ab2TBu>FE6UVg?z>f8cgIY{10T&B8Tnh+u#J1E|9t$7 zw#O}3oXhb(YRsU4>Zs#?tmd!SqKbBIQ#W39q1t1}2sLW-D8&OUb=LWpsukbC`FT}tGSDoslvhnwRGWJHT^(R7ykTLYDWHGdOLOTDPN!$yBSq-gl!q z{i5qsArA_6Zdjp?IdGiDx$Sp7~P)|K_mrna+t{L1^*OT-s;L&Ll?`%6_&Sg6*nTCVQ8?h-ZPh%?mE)tfZ!SD*f)h8eTpRQ1*epR3Kx z|K;bN&}mP<@-}xdN)>Ebqb9SAN1k$y`tdz1SM#Jnk`Bb%j%T7(p3<^w# zRKp$Z@+Aw@)#pxA7hZaW`fA=H#T)U|ss&%DL&mga+^*`c|9C}h+Okc}{rn?!%%mK3 z?)AS{r4`jot4{svnMXAJ$GN;WZQiPuEciy9ebhm!*|70y*}AO^!{zpuhZIdbhs(^^ z^2vK|(m4AcaiV&9){AQKvX!c`%GgDBl-$mh6cw_cysXBvK94^=ORZeHk&CWMz4X|9 z8g~qLdVhKPIkn{bwW=_0qfVQ8{6%W{>h)^p&U`iZi;vWi69=m+Zhyo~X}Ash_tTH6 zep7#<2L+Xd+tfLS9i;KDy5(NAY~>pD{dWu1FMo=TvA6m@G+=)zdh)@i-*UcEbKZYLVZDWD4>jvw|7H7bRv*3dn(D@BH$U(fRmLO6&1)BG z9N<0t#4NRSN1j@~Xr4Ozlw&x(yIQ$1U)5BWtNU-dOkI5S9cEAv8W8T-{Jq9~@!iif zzm-eotDl~Ex|(+SWorK7WvZm4SiSHB+nar7-jWq+*REY^(Y!etXZpogDy}}PMlBDT zjW1~;8Tun_dHq2F%VIa^_Pu}Epit(O@-JU7kY2VC;|9Z}VN(fOS4}TP-?7WZmd{R4 z)v9i^Y+k9-mz9D|-!Ik;#$*3@Exfi=x^ok+9+3LgosX&tGYM3xx>#Lt!E`m9I~UZv zlEOUoz+JbguXMv3k>-u(W~!-YT%fk@Dpz?MzgMSB9jdOq_wfh--!1*-GtG0^y3I-;NjQ>Ey$LS-|DXIyABzh1mX4+{P^^Hz1<6}RX?LBXa~>PLr6R2ScLzpApD zf!Y6kLE|i0$#gl)`r{pH(n%Mptp%kzE`Ofm^eM+pSH;{we*4iI8t2O;=s-fDrc6Ea z%+V%VH##ZTl&?zOl!QFx75&j{nX?xW@|U2S-2MwROKH_lLlhEGx(wnbJ701)wuI`z=? 
zXRDij^ZRhQEuZ(fnmCZuL(jYt<_CK|`#5*-bP&(%D^$sjby}t;UU zK3N@m{&n0*YXFV*^(Sv@oxYfF?7Z;v_c`8Q&0CD}V>{h^mCj$aIbYat$Gk!v#*gd& z{^|!h?fze#r;a%7VzoKHBn(rMw@IBb?I3m1`8Vj!9JHJ(1*`R-a!7ck;G2)%(u0Co zZ+sl4Q&&~49%uXQbLv&PgU{RaotEc%u3yIGO(k2&w1)6glEhu|RXnqwXyVOnR9IUS76^0$V%EB@%U+|@R>hI61f>NxI zMf3;*Wbnr9fZ^I54tO>KlZ1kK<|r}uE1p>aDFSOYWAIiAcZ4{ptiMGo4saH;Bl<=Q z;guc@6_L^KJE>^cD4M`K-P>|& zQB_&R&yQPCZca<;z*zhQUE^{)W_FMxjpf60*D3q&LoN8es)})1GJJ>58a5|~h2}$a zzL@ecAi1uH8msiLBW_bBiNJhn*-F0g&ZLZgUXTIiEdD=3F$*sy+;&Z{We zPIKnWq2i(5Pd<^9>qm4C@hPtw>gibwSB8?g`1e+?fF1fjp z_E)6Dn>Nt8tgKA)fxlK)l+y7Ro@H=Z#_F0{>eg=<-FDjzw0_+>`sCw}cuzqY?^Rez zkNow1I{MtZP{1M8`3(u}lqO!zT24K9618iE{VIrKc1^{I*^wqq zoIvOxtJp7dS&u!~7snqqm6oqtj}1F|(`VT(spR|Hlh z(!lbDx_VTb5`wh=oGxNzXPE`dD>Kx1qGtv+5w%igoJqT+!mC~#L-8W;Mlpok&|xO1 zYcLNQwpP;Sa|4ME2avp!PQ0g#H(-_0+poSrC!TpRBM+l<&N-AOjN6kAJoYr&z%b3Y z)u|1=`$qtIFIEqn{dm^%c<`kgXr$xT~Bq~Y4_m)$!m+}(MKQ6rcd5` zoi3m8ispa!gMXrP&pLw!_UW!=5!&eS>v$v9SD$@Im!CV0zTH4{#+m2PxN+lX=*Y43 z%E!yXySz=$wl_6nU4Yk|cZ=1ry&-!JjM%J}c5T^4ExL{3!4l`Q+&Ow{Hru{+3r@qE z7STa>DB1u9KC-hPa44a=NLhjhQuP~2yvdC3%S(%N+G{U8Pw&0iN?VYZ_<3__E4uc& z>uJx?19d)ZXz1xE;~LsJjcv)3Y?BfCD>2}Ks~(t1p8onA-gsa;Fh_L6Wkvb)_rEIY^5sah)hWss1s0Mv9eHj=!hfWc%Pb;@nQ zrr{m^S}#MsIq}&CtV?Z06%}n7K;3(G)O4{qUEW}6-=#CPtTk^^=uDq~nlUc~1Byb@XlrdaL?-ohCUK?_lkRyg1GTq8y(1#GOd;o_|Lk5X{ z-!N&023dE4rcp^;AIRtBd!YeGh^Xiw*Ti|DE%0X#KeX%BF^3w?d|1eE2lbHNEDvV8 zsMJfB&eH>eKRx>rojmPO-VNTGI(2MM@4ncI?s@qe-HzpicCJ@d@XU(?-kWdHq9qj- zY^Sfk`j*BWa4>b|UHFwe;}cyqBE!PvjEF{xh%B0v{)4_&hmf62$Hgh!2{docv)|%{ zblJss(T&$#O*wTHtOXAYFh$~b$@BSOn*js*5O&7*9W;tAxojMrHvMe6{m$Rgg1Mj5 z>1SU=oxAm*pPhd?%VG3Kr`DVYFIzUOq5}_|M%Uf(ApPsY>C}bw?%1&-ty(af?)cqR z3~w739b8Bgt%SKpN5!vn@NBdzua*?>Y_X145pZq*4;WfDr_$n5UNteZ?U0uBFHDM` zC+NChx=ZNJ|HH;IH%1=r_&K{3zx_e9Ctc60JX0o)=EIRHSSLPKnpawO@*52;dDlIN zpj&EX)q5i~{^AF^_?c9`vF^-0>Zb(QPe(&?p$I;E?=?Ljc;U6T>9B+LrCh9_@b3Ia zi+9kA%S((7LOz5CdbkZ!4l8Qp6@KPwAu%2Jtr1d)0?NGZ*Su9Owdt^d3PNWg;H&|5 z6n=A~Ic-?Ik*ZpT-js=E5Hlv+wO@x=hJ75EYm0dW#jGS@B@QEPtWXs3EBr&)pP*<> zE&Bw^2A{3tfkJa0JXG_SauMVeOC4c5z@O08@WxqQk*K0Wk2;PfA9^@l`m-x}HE$jL zYt|EV@^QbQ1>e6zSD9YDQ8Kc5!$K}f62VQuYHZB#t3@!#u z(lm*lXrH80!ZJC22#MN2BZdBmJz~*_m1ooPz88B)GX}_Jgcq!2RwcNY>9!1Y^gFh0 z;^fhE%wbcg4?hOz)UgBAlo!!|Uwx7Kbi=}b9rftdi%vRZ0R8WyFLYy$0Rh4{pS@2< z9&-#oawsvcgz=KO%=SYqH$yceJPeTX)iWX%)kuqS$H{r176}5&djJ?Pm!=$h6TLlm z8NcFK#Y^_xsB70Q{K{5mdi$j(>B(oGC#;Uo`}Dtb+~v1ZF%Mk2b?>2H3p?fXGwJFx zy76OyeB!=3EQ4^z>vjmIoOC=5-lHFN?%YYs^6AGPP`^Wm=;eP6pdF4I_>hTl!4I$R z=_P88jXU5ldU4ile(iHL_Y1j<+k)zFmoUF|)VfVO>fQ}@=}P50x6u6$JV3L*;l~Fo7nZ?$ z*^dXby6>e0PB4+uI?;kFGl z_uDUN#DHGZCRDZ<$hE-ZAXttG=yM?)3SxD_oL*szWL^;j9#&;Q!-y`DGA({h5w8a} z%WX%8o-v)i{QPs;oL{U5u2}M4zhV)+`r_YMzE0ltSSw{_`bJm7C92al8NBiql0k!P zqA$Oi%MZ1%FCr2@$`lT3BS(-!u`IKdHATE<_zk z;;oM4praT&!qTQdQl*XtNE#TSg+YQe(nxE8e8(j&3^Vg`s@Cq>4c*XJ9H2L4i_)*J=)rLx50Vu`F1?zIi;U~~O)xV@)|K={b>DsHPU-$O(@^g>Vy^lRh z14kXAd21a-$&kLl#rXpNeeXTmXTsie%n>X*%erJ~5zmUlb7`Pbg|^FbUXiJ#?!AZ5 zbAPypesa<2+=jhGM@*hbd7D<#EmvPc?|;*suD<1N#^5~z><{ZV7ioKI8f8WNzJ6Ia zjx;-n7owf)!aejWuf57=DCE$ju_N^Cb!Q3VhL+TUH*(Zi`t-*8=-FA%(+Ni$M4Q+L zo_g{zdiLKR(9~1=(ZUsLY21iCxKOKTPF{(g31bCl*N!bz&ddFFz)X7?C*Tx91T&%A z5T3M^mY31`o%~3E^YDYL7}%E;ETT$&9*=P9Pku(fo-~zi>eh}fx!@cs&D%(SdSoVj zvzcfNZ(M=j8B7fx{=?h&`F^#w7ZPi+AAv*Eq>^!K_#Op(bX)YtRfDDyRGP=8R$e&Jr^=V6W#8@^v!L>|T6jWX` zuwMCY(#oM8x^rf`Mpq;pu)|x3nI?YGd2ZC=rwI*BkDD~aHE%{m2afVhoR@Kk7JTT+ zX-7|^&K)8%J3RBf>f+O>PmiuzI1R}&#nI#TqAqHno?#E&b0WRMo!5;&znngJ`*qzR zpLOAtwBVAT)7`(=pI&_Rzcgm}a60+S3+egVX7q=@Jk0QIX-82h-97U$`WY{~x8-H> zGOo8*U-T2|+sp7`0-t!-qt9S!+rGD^-KFmk zdU5ewy6?Al(6!fGO)Y9m>Ch9;rq^G8iQazaOZFwRFCa3uWGe7io|H^I`6u+zSJ%;{ 
zzkiax*mF3>fUz`nlOx4LG&sU26jY~k3NZ>eB@r5`Ot6a?U(z}vMpuw z0NdcFGcKb$Z@-lu{rfBQ$}?K$yKTD|mJdg%W9>CWHW zPTLF1>6ROBp(&I8LmzMDeHwbH9d#`_wB!|$lTSK}PkzK>89q;n4^BGq*i-e258_%x zwoT{G-D$>@5xSkhK7np~Po!U6aw(NdmVhMTLdbW595_`<|P5(AiNBup%pSkTP8XgEZ~+Q4BKll|(a&OT1x> zlC>$4CJUHB8_FoSfmx3<%Is^F}7_OwFdP#t76^t65h2d=l%!D8h;1Nehja}W? z#3hY28Xqiq*YcwXy_CwbV`d6moVvJa%U0gSPP{VEgF6DVp$z3NI_GcS#`_Ek8K)V; zbkj0I7x0X>sVQ4D_J#R7Xh(j5K3%R~A3mLq15Po->54sj@(F{mE%U%qG-&7n9Qy_K zR$wDrE)TwthLx`h-rc`x^A=tKYeoI{7{Hw`%grk_xz-WA5m`)o2Yw|*yQrFF#^W1s zz(!DGGxNM0eC6za&|-zc`s~=oWygagynWE8Zy(-(1{`y`A0ENLexL;!#{OJ`bsU`Q z2a&IiWhi91uz#T)cg$h_Vu!E{b+l>227bI#O+9+{rp|bl&N^({x>cWi*_F#FN)FD& zqh0MQ)}syVtFZOfZ*jnP*uE%VZEv;?%0U8TMY{Oij_q4{FGm~d%8!By3wBZ~-dll{ zj=Qh9fR^(6DYItYsBH&)+aKUV&<~ph!3)po`3wMljKlSb2eZtE;jxOv-}GQuv@=`R zEulU~o=N|k^#UEW-)LI7Vg)s4-u?Ucr;UM@wtKOh^-^w5bU%NnCnH$6>7zV zqF-FQ;}_y+Xi+YafQ~Y{;fg1tmI`7E;bpGI);0{%b)3x$ zJ4s$N9{M&H`*WIpG{YSYup{=d@12zD$3vu3-YM!GqKT-?ZGJRq{_Mn6OgX`l&QXU7?f7i0lWGz@DO>U|_n zt^$uM0(fB%V4i2gc5XJr^JxB<7&pkEgI3JQ&Nm(baD3#rri=I9^*$Hyf}UZ%xHJ6p zP3Vs;JCd9-%Ag8xS{@rYd~bAtZZL(wIcWpjnI^E9Q4228q+E;hQ+39j(fU4;gWJid$YHv?SAt>s~1AW0b42p5h6PUtgKC>{P08%OYf3%{Lq`UUip z&*ss1-p6q7{SOeIv`Kf|^8j6U>pgVrq2~R5(02Ek+CZt~k-spw8hi?lkn)4~Y zdT8o&HLnO^kCgb3;E3(?9vP;Gd`S~~=+fXfs-(^#U0`I71?XYWD}E*PgSd(u?zFUY zI!oJ`Wz%KP5zA8UsEMY+0HS;>Twxts@K_X#!+GIr1s#qO1kczKHRUE^0hEWSIEi4n z!%ErOx#Anz$#qMlnxXb=n?z~iT9I}y?mq%7RI z>6NG$ZuQVF4Q;~^u-M7Q)%jjF#!8Uxe4A($Enj_~TS#dB2VMz?YkU*cOS$Px4FojD|U1&!?fI z$MSInJ$0RoN88B<@kDTJ{A}LqS0~vJy1fii5HW5%nyEAv@o0PHLpBUrf?l=o;@6a( zng#s&61X^B8I@A-<(ir_{;-9Yn>_v~1P3Lu1V=GFl>Ex+7cMB!iVdIaeLep|_h=3NW6>2{$ci6U5gTL3=$?Yc zXrRU-vp8h3@}`QPEZ=x|PPv@3GaDbU2+}}H+)mQ~Y5r4BW^3FnvZYxBO&$#s_+^r3 zCiHUJHxoSl@(`cf-E8hx|3u&tEk`^~(ZdI8Wi2TGwq?P+#FH(@Y)p6)v3)Gqw5+Sa zpc3qRNz29+&wbEH?yezO=M{UTiZ676zSfhAUCZ_bH!KwH~v@CQHxjYPUC zNK^xa#uuFk|I>p5jwZ`D)>3>FPpka=@(Rlqqqh(qerZ}_=-YJK+{p{^Wcb-QSlNTF zGBdREY<^zpHcYzjMF!_J2)>|+kZidJAwxVGkP$yAt9bBD`SmRzzEeLZ^C@XrDF{*x zEE#c^8zjkKX+0<~23gOnWcb-Q=wSSGGB%EUWyjF!2)0er+)G|&brY`Xyn2^sG*`cH(OBLw{mL0UHPs5|&0@FDvHjGq zvgSZ8v~$5vaD(=d1SOOP$ge5f;hG%M-7cx7Viq6{k}9ou_^Fnm`Ao8D?1N~ZAWv8( zChM%Yk{{nb*bjhR&ar*nrlVqR^FJ1CIt|GrPyN*BVBMlDnz~3o@ z6G!51aCl7&r!>Y2Zg{ca{p8s+0-Y`^nx$5wHYOLyOIb^feqY8gmC;TcJG@Lww$p3D?|Ot?H||4 zH4UYd7;lU|_>dTs%GZ6bDL#hdYY=6r&hpWtQ-q{Ukb28RaYG5G1t~|EXaYAPZ#5l@&_UDG6 ztMA7LB5W!Sq!fR|gB$Lg=6qm9zyb25B3r=XWR2Vp#SWsU?U%UM={U>R(JzW`2%et; zvT1w4V_*>Gh5C+ung1c(<_Ua>d(m=Fhfc9^HLq~z3B_YKJnnq_r~0vre`;(pD98c! zal5zk1SXa(Z5oWoYl6Km6w^Xu$Dbk zK(|q$4p;0PH1GPFdZ%a^LglaDOtA){BA4jkO{MYV;7h8|$E>0SC-?4Tqu{KPT}u27 zhsYqH8Gkl3y#e&o&BI#*I3Ox;Q^>vrP!_sl4L}s}VNDZs`Gl=G>lCnOe8O!jEVA13SP<|erC06 zkWu8+ULFxYijBZTB{t9qWW^`E8h08oVYx1%y+F8*iUDO<>7cz-~aK zvA3_uQH_md^TfUVe4L{kQ2w}yzh@veCo>=r80cm1>=rq);cMWXDqp`e{KAQy;$6{+ zIy+dwL8<8}h3?SKtTP&WIH=6=#3#I3e)Z_F} zZ=%{zEbxO{DQGB$Z;~Te3z&AOz@rUFoq#v^}$e4aBG;wgVzz=Svz|9Cv zX^r0k_AEv^3F$x(mo%ek@&f|F7utw}HsU%hRnmq*n*QSah2-|suy%pURKH9Nc{e^{ zYq)|><-;-d z5FO-dn@sGEJgm&P!PxMD--iZ(ki+I}-v`dqFF}VL{i3A;w>$RWT7ZDM>RVi%hiQf+<$Yo|pp ztpdlxY_N_GTXI(X#-e&-k~>|prt|evky#okiEIC`9w9`fb0*@>w0J%xJmSHn zBj1g;q@e>(RF`-%V#Xh)Z`n%t$W`bd?o5-sOkN?g$el^PiRhjPY|uIcR~HkR^9HR& zqVynZW0KeCm+?VCHXSpuBIU2NHEWrbiY{^{W2{800HAkL_t(^lF18#x1kZKTgk}bhGUnB zxsUjl8@%5%Q-_k zlE<4v{2ZYxSHVCWAN4`72M3a$?kg-JHh?U8D9U~>!7c-VlRR9V`q+89^L<0sJ6g8* zA4Bt7PR*O+`z{%ucl#C36HZGkMlo0K?!F%w9my#paFc8?H4N64Ky zcj$gYeHy?FEg7e93^eGv(~g!bNT1O>9n1B?P2{mSoK8 zEKP%l1}zeVk6a@;>AUmdB|0MC3nqP1MN@XlAo7Te(a=q?hHrsrzi;^R1}h=__x}MR W7kPr)tKEqJ0000 +Home · MLJ