diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000..41da0ad48 --- /dev/null +++ b/.dockerignore @@ -0,0 +1 @@ +./data diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..73f8102c1 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,38 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: bug +assignees: '' + +--- + +#### Describe the bug + +A clear and concise description of what the bug is. + +#### To Reproduce + +#### Steps to reproduce the behavior + +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +#### Expected behavior + +A clear and concise description of what you expected to happen. + +#### Screenshots + +If applicable, add screenshots to help explain your problem. + +#### Specs +- OS: [e.g. Ubuntu 22.04, Windows 11, macOS 14] +- Package Version: [e.g. 0.1.2] +- Python Version: [e.g. 3.10] + +#### Additional context + +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..7c2cc99ff --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,24 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +#### Is your feature request related to a problem? Please describe + +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +#### Describe the solution you'd like + +A clear and concise description of what you want to happen. + +#### Describe alternatives you've considered + +A clear and concise description of any alternative solutions or features you've considered. + +#### Additional context + +Add any other context or screenshots about the feature request here. diff --git a/.github/workflows/build-push-docker-image.yml b/.github/workflows/build-push-docker-image.yml new file mode 100644 index 000000000..7a323ece7 --- /dev/null +++ b/.github/workflows/build-push-docker-image.yml @@ -0,0 +1,164 @@ +# build-push-docker-image.yml: builds a Docker container image and pushes it +# to the Agent Artificial repository in GitHub Container Registry (GHCR) + +on: + workflow_call: + inputs: + aws_region: + required: false + type: string + default: us-west-1 + description: AWS region for CLI commands (such as ECR login) + build_args: + required: false + type: string + default: "" + description: newline-delimited list of Docker build args (as "key=value") + context_dir: + required: false + type: string + default: "." + description: context directory which is passed to docker build + docker_file: + required: false + type: string + default: Dockerfile + description: override Dockerfile name + image_prefix: + required: false + type: string + default: "" + description: prefix for image name + image_registry: + required: false + type: string + default: ghcr.io/${{ github.repository_owner }} + description: Docker/OCI registry to host the image (hostname and optional path) + image_tag: + required: true + type: string + description: image tag (e.g. build date, commit, version, variant) + microservice: + required: true + type: string + description: name of the application + microservice_env: + required: true + type: string + description: name of the environment (e.g. 
dev, test, prod) + microservice_path: + required: true + type: string + description: path within the caller's Git repo which contains the Dockerfile + push: + required: false + type: boolean + default: true + description: whether to push the built image to GHCR (true) or simply build it (false) + branch: + required: false + type: string + default: main + description: the branch deploy + outputs: + image_name: + value: ${{ jobs.build_push_docker_image.outputs.image_name }} + description: the full name of the image, including registry but excluding tag + # secrets: + # aws_access_key: + # required: false + # description: AWS access key ID for CLI commands + # aws_secret_key: + # required: false + # description: AWS secret access key for CLI commands + +jobs: + Debug: + runs-on: ubuntu-latest + steps: + - name: Debug information + run: | + echo "=== Variables ===" + echo "Variable: ${{ inputs.build_args }}" + echo "Variable: ${{ inputs.context_dir }}" + echo "Variable: ${{ inputs.docker_file }}" + echo "Variable: ${{ inputs.image_prefix }}" + echo "Variable: ${{ inputs.microservice }}" + echo "Variable: ${{ inputs.microservice_env }}" + + build_push_docker_image: + name: Build and Push Docker Image + runs-on: ubuntu-latest + env: + build_args: ${{ inputs.build_args }} + context_dir: ${{ inputs.context_dir }} + docker_file: ${{ inputs.docker_file }} + #image_name: ${{ inputs.image_registry }}/${{ inputs.image_prefix }}${{ inputs.microservice }}-${{ inputs.microservice_env }} + image_tag: ${{ inputs.image_tag }} + microservice: ${{ inputs.microservice }} + microservice_path: ${{ inputs.microservice_path }} + microservice_env: ${{ inputs.microservice_env }} + push: ${{ inputs.push }} + outputs: + image_name: ${{ env.image_name }} + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ inputs.branch }} + #submodules: 'true' + + - name: Set registry_host and convert image_name to lower + env: + image_registry: ${{ inputs.image_registry }} + image_prefix: ${{inputs.image_prefix}} + microservice: ${{inputs.microservice}} + microservice_env: ${{inputs.microservice_env}} + run: | + echo "Grab just the docker registry host" + registry_host="${image_registry%%/*}" + echo "registry_host=${registry_host}" | tee -a $GITHUB_ENV + echo "Converting the GitHub org to lower case (required for GHCR)" + #echo "image_registry=$(echo $image_registry | tr '[:upper:]' '[:lower:]')" | tee -a $GITHUB_ENV + echo "image_name=$(echo $image_registry | tr '[:upper:]' '[:lower:]')/${image_prefix}${microservice}-${microservice_env}" | tee -a $GITHUB_ENV + + - name: debug + run: | + echo "=== Variables ===" + echo registry_host: ${{env.registry_host}} + echo image_name: ${{env.image_name}} + echo image_tag: ${{env.image_tag}} + echo complete image name: ${{ env.image_name }}:${{ env.image_tag }} + + - name: Install Docker via Buildx + id: buildx + uses: docker/setup-buildx-action@v2 + with: + buildkitd-flags: --debug + #driver-opts: image=moby/buildkit:v0.10.6 + + # - name: Login to ECR + # if: inputs.push && contains(env.registry_host, '.dkr.ecr.') + # uses: docker/login-action@v2 + # with: + # registry: ${{ env.registry_host }} + # username: ${{ secrets.aws_access_key }} + # password: ${{ secrets.aws_secret_key }} + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build image and push to registry + uses: docker/build-push-action@v5 + with: + build-args: ${{ env.build_args }} + context: ${{ env.context_dir 
}} + file: ${{ env.microservice_path }}//${{ env.docker_file }} + github-token: ${{ secrets.GITHUB_TOKEN }} + no-cache: true + pull: true + push: ${{ env.push }} + tags: "${{ env.image_name }}:latest,${{ env.image_name }}:${{ env.image_tag }}" diff --git a/.github/workflows/pr_and_push.yml b/.github/workflows/pr_and_push.yml new file mode 100644 index 000000000..22e8d1511 --- /dev/null +++ b/.github/workflows/pr_and_push.yml @@ -0,0 +1,137 @@ +name: PR and main Push Tests + +on: + pull_request: + branches: + - main + pull_request_review: + types: [submitted] + + + push: + branches: + - main + + workflow_dispatch: + +jobs: + Variables: + runs-on: ubuntu-latest + outputs: + image_tag: ${{steps.vars.outputs.image_tag}} + microservice: commune + microservice_path: ./ + context_dir: "." + microservice_env: dev + image_registry: ghcr.io/${{ github.repository_owner }} + + steps: + - name: Create Timestamp Variable for image tag + id: vars + #Edit variables down here for values + run: | + echo "image_tag=$(date +'%Y-%m-%d--%H-%M-%S')" >> $GITHUB_OUTPUT + + Debug: + needs: Variables + runs-on: ubuntu-latest + steps: + - name: Debug information + run: | + echo "=== Variables ===" + echo "Time Var: ${{ needs.variables.outputs.image_tag }}" + echo "Variable: ${{ needs.variables.outputs.microservice }}" + echo "Variable: ${{ needs.variables.outputs.microservice_path }}" + echo "Variable: ${{ needs.variables.outputs.microservice_env }}" + echo "Variable: ${{ needs.variables.outputs.context_dir }}" + echo "Variable: ${{ needs.variables.outputs.image_registry }}" + + Commune_Tests: + needs: Variables + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + # with: + # submodules: 'true' + + - name: Install Prerequisites + run: | + sudo apt-get update + sudo apt-get install curl nano python3 python3-dev python-is-python3 build-essential cargo libstd-rust-dev -y + sudo python -m pip install --upgrade pip + sudo pip install setuptools wheel + sudo pip install -r requirements.txt + sudo apt-get install -y nodejs npm + sudo npm install -g pm2 + + - name: Install Commune + run: | + sudo pip install -e . + + # This takes WAY too long (at least on GitHub provided runners) + # - name: Commune Tests - c sync + # run: c sync + + # # Interestingly, below the variables for inputs cannot be called directly. (even tried: ${{github.event.inputs.image_tag}}) + # # Instead, saving them as env vars first and then using them as env vars below works. 
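+      # (Hypothetical sketch of that workaround, not an actual step in this workflow: map the value into
+      #  `env:` on the job or step, e.g. `image_tag: ${{ needs.variables.outputs.image_tag }}`, and then
+      #  reference it inside the retried command as ${{ env.image_tag }} or $image_tag.)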
+ # - name: Initial test (with retry because RPC issues) + # uses: Wandalen/wretry.action@master + # with: + # command: c balance + # attempt_limit: 3 + # attempt_delay: 10000 + + - name: Commune Tests - c balance + run: c balance + + - name: Commune Tests - c serve demo tag=latest + run: c serve demo tag=latest + + - name: Commune Tests - c logs demo lines=1 mode=local + run: c logs demo lines=1 mode=local + + - name: Commune Tests - c root_key + run: c root_key + + - name: Commune Tests - c stats + run: c stats + + - name: Commune Tests - c subnet_params netuid=all update=True + run: c subnet_params netuid=all update=True + + - name: Commune Tests - c namespace network=subspace + run: c namespace network=subspace + + # TODO: This broke see here for error: https://github.com/Agent-Artificial/commune/actions/runs/8351758971/job/22860686705 + # - name: Commune Tests - c global_params + # run: c global_params + + - name: Commune Tests - c modules + run: c modules + + + + - name: Commune Tests - c data.text.realfake test + run: c data.text.realfake test + + - name: Commune Tests - c call data.text.realfake::whadup sample + run: c call data.text.realfake::whadup sample + + - name: Commune Tests - c call data.text.truthqa::whadup sample + run: c call data.text.truthqa::whadup sample + + - name: Commune Tests - c port_range + run: c port_range + + # PLEASE PLEASE PLEASE ADD TESTS HERE + + Create-Docker-Image: + needs: [Commune_Tests, Variables] + uses: ./.github/workflows/build-push-docker-image.yml + with: + image_tag: ${{ needs.variables.outputs.image_tag }} + microservice: ${{needs.variables.outputs.microservice}} + microservice_path: ${{needs.variables.outputs.microservice_path}} + context_dir: ${{needs.variables.outputs.microservice_path}} + microservice_env: ${{needs.variables.outputs.microservice_env}} + image_registry: ${{needs.variables.outputs.image_registry}} \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..8765b98fc --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,120 @@ +name: New Release Pipeline + +on: + push: + tags: + - "v*" + + workflow_dispatch: + +jobs: + Variables: + runs-on: ubuntu-latest + outputs: + image_tag: ${{steps.vars.outputs.image_tag}} + microservice: commune + microservice_path: ./ + context_dir: "." 
+ microservice_env: release + image_registry: ghcr.io/${{ github.repository_owner }} + + steps: + - name: Create Timestamp Variable for image tag + id: vars + #Edit variables down here for values + run: | + echo "image_tag=$(date +'%Y-%m-%d--%H-%M-%S')" >> $GITHUB_OUTPUT + + Debug: + needs: Variables + runs-on: ubuntu-latest + steps: + - name: Debug information + run: | + echo "=== Variables ===" + echo "Time Var: ${{ needs.variables.outputs.image_tag }}" + echo "Variable: ${{ needs.variables.outputs.microservice }}" + echo "Variable: ${{ needs.variables.outputs.microservice_path }}" + echo "Variable: ${{ needs.variables.outputs.microservice_env }}" + echo "Variable: ${{ needs.variables.outputs.context_dir }}" + echo "Variable: ${{ needs.variables.outputs.image_registry }}" + + Commune_Tests: + needs: Variables + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + # with: + # submodules: 'true' + + - name: Install Prerequisites + run: | + sudo apt-get update + sudo apt-get install curl nano python3 python3-dev python-is-python3 build-essential cargo libstd-rust-dev -y + sudo python -m pip install --upgrade pip + sudo pip install setuptools wheel + sudo pip install -r requirements.txt + sudo apt-get install -y nodejs npm + sudo npm install -g pm2 + + - name: Install Commune + run: | + sudo pip install -e . + + # This takes WAY too long (at least on GitHub provided runners) + # - name: Commune Tests - c sync + # run: c sync + + + - name: Commune Tests - c balance + run: c balance + + - name: Commune Tests - c serve demo tag=latest + run: c serve demo tag=latest + + - name: Commune Tests - c logs demo lines=1 mode=local + run: c logs demo lines=1 mode=local + + - name: Commune Tests - c root_key + run: c root_key + + - name: Commune Tests - c stats + run: c stats + + - name: Commune Tests - c subnet_params netuid=all update=True + run: c subnet_params netuid=all update=True + + - name: Commune Tests - c namespace network=subspace + run: c namespace network=subspace + + # TODO: This broke see here for error: https://github.com/Agent-Artificial/commune/actions/runs/8351758971/job/22860686705 + # - name: Commune Tests - c global_params + # run: c global_params + + - name: Commune Tests - c modules + run: c modules + + - name: Commune Tests - c data.text.realfake test + run: c data.text.realfake test + + - name: Commune Tests - c call data.text.realfake::whadup sample + run: c call data.text.realfake::whadup sample + + - name: Commune Tests - c call data.text.truthqa::whadup sample + run: c call data.text.truthqa::whadup sample + + - name: Commune Tests - c port_range + run: c port_range + + # PLEASE PLEASE PLEASE ADD TESTS HERE + + Create-Docker-Image: + needs: [Commune_Tests, Variables] + uses: ./.github/workflows/build-push-docker-image.yml + with: + image_tag: ${{ needs.variables.outputs.image_tag }} + microservice: ${{needs.variables.outputs.microservice}} + microservice_path: ${{needs.variables.outputs.microservice_path}} + context_dir: ${{needs.variables.outputs.microservice_path}} + microservice_env: ${{needs.variables.outputs.microservice_env}} + image_registry: ${{needs.variables.outputs.image_registry}} \ No newline at end of file diff --git a/.gitignore b/.gitignore index a7194e664..fb7867f11 100644 --- a/.gitignore +++ b/.gitignore @@ -5,9 +5,6 @@ *$py.class *.pyc env/ -# weigths and biases -wandb/ - # benchmark results benchmarks/results/ @@ -21,8 +18,6 @@ benchmarks/results/* # runs/data/models/logs/~ data/** -!data/key.json - # C extensions *.so @@ -177,7 +172,7 @@ 
cython_debug/ # The cache for chain data in container .local -!data/.gitkeep +!**/.gitkeep # misc .DS_Store @@ -308,6 +303,10 @@ cover/ # PEP 582; used by e.g. github.com/David-OConnor/pyflow __pypackages__/ +# Scraping +images +logs +websites.json # Environments .env @@ -318,5 +317,6 @@ ENV/ env.bak/ venv.bak/ - +# Vector Store +embeddings.db diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index efb9173b9..000000000 --- a/.gitmodules +++ /dev/null @@ -1,31 +0,0 @@ - -[submodule "frontend"] - path = frontend - url = https://github.com/commune-ai/frontend - branch = docs -[submodule "subspace"] - path = subspace - url = https://github.com/commune-ai/subspace.git - branch = main - -# the repos below are not used in the current version of the code -[submodule "diffusers"] - path = repo/diffusers - url = https://github.com/commune-ai/diffusers.git - branch = main -[submodule "bittensor"] - path = repo/bittensor - url = https://github.com/opentensor/bittensor.git - branch = master -[submodule "trl"] - path = trl - url = https://github.com/lvwerra/trl -[submodule "transformers"] - path = repo/transformers - url = https://github.com/huggingface/transformers.git -[submodule "subtensor"] - path = repo/subtensor - url = https://github.com/opentensor/subtensor -[submodule "telemetry"] - path = telemetry - url = https://github.com/commune-ai/telemetry.git diff --git a/.swp b/.swp deleted file mode 100644 index e893e25d6..000000000 Binary files a/.swp and /dev/null differ diff --git a/=0.13.3 b/=0.13.3 deleted file mode 100644 index c4342c7bc..000000000 --- a/=0.13.3 +++ /dev/null @@ -1,2 +0,0 @@ -Defaulting to user installation because normal site-packages is not writeable -Requirement already satisfied: tokenizers in /home/endless/.local/lib/python3.10/site-packages (0.12.1) diff --git a/=3.15.3 b/=3.15.3 deleted file mode 100644 index 2bf5ee8a6..000000000 --- a/=3.15.3 +++ /dev/null @@ -1,2 +0,0 @@ -Defaulting to user installation because normal site-packages is not writeable -Requirement already satisfied: protobuf in /home/endless/.local/lib/python3.10/site-packages (3.19.5) diff --git a/Dockerfile b/Dockerfile index 2452d72cf..ee2946bb9 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,19 +1,32 @@ -FROM nvidia/cuda:12.0.0-devel-ubuntu22.04 -WORKDIR /commune -RUN rm -f /etc/apt/sources.list.d/*.list -ARG DEBIAN_FRONTEND=noninteractive -# INSTALL APT PACKAGES (NOT PYTHON) -RUN apt update -RUN apt upgrade -y -RUN apt install -y curl sudo git htop netcat wget unzip tmux apt-utils cmake build-essential protobuf-compiler -# INSTALL PYTHON PACKAGES -RUN apt install -y python3-dev python3-pip -COPY ./commune /commune/commune -COPY ./requirements.txt /commune/requirements.txt -COPY ./setup.py /commune/setup.py -COPY ./README.md /commune/README.md -COPY ./bin /commune/bin -RUN pip3 install -e . 
-# INTSALL NPM PACKAGES -RUN apt-get install -y nodejs npm -RUN npm install -g pm2 +#FROM ubuntu:22.04 + +#probably better: +FROM python:3.12-slim-bookworm + + +ENV PYTHONUNBUFFERED True +ARG DEBIAN_FRONTEND=noninteractive + +WORKDIR /commune + +RUN usermod -s /bin/bash root + +#RUN apt-get update && apt-get upgrade -y +RUN apt-get update +RUN apt-get install curl nano python3 python3-dev python-is-python3 build-essential cargo libstd-rust-dev -y +RUN python -m pip install --upgrade pip +RUN pip install setuptools wheel + +#RUN apt-get update && \ +# apt-get install -y curl nano python3 python3-dev python3-pip build-essential cmake apt-utils protobuf-compiler + +#RUN curl https://sh.rustup.rs -sSf | sh -s -- -y + +COPY ./requirements.txt /commune/requirements.txt +RUN pip install -r requirements.txt + +RUN apt-get install -y nodejs npm +RUN npm install -g pm2 +COPY . /commune +RUN pip install -e ./ +ENTRYPOINT [ "tail", "-f", "/dev/null"] \ No newline at end of file diff --git a/Dockerfile.gpu b/Dockerfile.gpu deleted file mode 100755 index 2452d72cf..000000000 --- a/Dockerfile.gpu +++ /dev/null @@ -1,19 +0,0 @@ -FROM nvidia/cuda:12.0.0-devel-ubuntu22.04 -WORKDIR /commune -RUN rm -f /etc/apt/sources.list.d/*.list -ARG DEBIAN_FRONTEND=noninteractive -# INSTALL APT PACKAGES (NOT PYTHON) -RUN apt update -RUN apt upgrade -y -RUN apt install -y curl sudo git htop netcat wget unzip tmux apt-utils cmake build-essential protobuf-compiler -# INSTALL PYTHON PACKAGES -RUN apt install -y python3-dev python3-pip -COPY ./commune /commune/commune -COPY ./requirements.txt /commune/requirements.txt -COPY ./setup.py /commune/setup.py -COPY ./README.md /commune/README.md -COPY ./bin /commune/bin -RUN pip3 install -e . -# INTSALL NPM PACKAGES -RUN apt-get install -y nodejs npm -RUN npm install -g pm2 diff --git a/Makefile b/Makefile index 2ae7263a7..533400473 100644 --- a/Makefile +++ b/Makefile @@ -8,6 +8,9 @@ SUBTENSOR=0.0.0.0:9944 PYTHON=python3 +start: + chmod +x ./start.sh ; ./start.sh + build: docker-compose build @@ -17,38 +20,9 @@ stop: make down up: docker-compose up -d -start: - make start -restart: - make down && make up -logs: - ./$(COMMUNE).sh --commune - -subspace: - make bash arg=$(SUBSPACE) - -enter: - make bash arg=$(COMMUNE) restart: make down && make up + +enter: + docker exec -it commune bash -prune_volumes: - docker system prune --all --volumes - -bash: - docker exec -it ${arg} bash - - -kill_all: - docker kill $(docker ps -q) - -logs: - docker logs ${arg} --tail=100 --follow - - -enter_backend: - docker exec -it $(COMMUNE) bash - -pull: - git submodule update --force --recursive --init --remote - \ No newline at end of file diff --git a/README.md b/README.md index 3ad6c1bb1..27220905f 100644 --- a/README.md +++ b/README.md @@ -1,160 +1,140 @@ -# COMMUNE: An Open Python Network +
-Commune is a protocol that aims to connect all developer tools into one network, fostering a more shareable, reusable, and open economy. It follows an inclusive design philosophy that is based on being maximally unopinionated. This means that developers can leverage Commune as a versatile set of tools alongside their existing projects and have the freedom to incorporate additional tools that they find valuable. +# **Commune AI** -By embracing an unopinionated approach, Commune acknowledges the diverse needs and preferences of developers. It provides a flexible framework that allows developers to integrate specific tools seamlessly while avoiding imposing rigid structures or constraints. This adaptability enables developers to leverage Commune's capabilities in a manner that best aligns with their individual projects and workflows. - -The overarching goal of Commune is to create a collaborative ecosystem where developers can easily share, connect, and extend their tools, ultimately fostering innovation and efficiency within the development community. By providing a network that encourages openness and accessibility, Commune empowers developers to leverage the collective knowledge and resources of the community to enhance their own projects. +[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) +[![Discord Chat](https://img.shields.io/badge/discord-join%20chat-blue.svg)](https://discord.com/invite/DgjvQXvhqf) +[![Website Uptime](https://img.shields.io/website-up-down-green-red/http/monip.org.svg)](https://www.communeai.org/) +[![Twitter Follow](https://img.shields.io/twitter/follow/communeaidotorg.svg?style=social&label=Follow)](https://twitter.com/communeaidotorg) -## Socials +### An Open Modules Network -- Twitter: [@communeaidotorg](https://twitter.com/communeaidotorg) -- Discord: [commune.ai](https://discord.gg/DgjvQXvhqf) -- Website: Comming Soon +
-## Setup +Commune is a protocol that aims to connect all developer tools into one network, fostering a more shareable, reusable, and open economy. It follows an inclusive design philosophy that is based on being maximally unopinionated. This means that developers can leverage Commune as a versatile set of tools alongside their existing projects and have the freedom to incorporate additional tools they find valuable. +By embracing an unopinionated approach, Commune acknowledges the diverse needs and preferences of developers. It provides a flexible framework that allows developers to integrate specific tools seamlessly while avoiding imposing rigid structures or constraints. This adaptability enables developers to leverage Commune's capabilities in a manner that best aligns with their individual projects and workflows. -### Setting up Commune with Docker +The overarching goal of Commune is to create a collaborative ecosystem where developers can easily share, connect, and extend their tools, ultimately fostering innovation and efficiency within the development community. By providing a network that encourages openness and accessibility, Commune empowers developers to leverage the collective knowledge and resources of the community to enhance their own projects. -Install Docker: If you don't have Docker installed on your system, download and install it from the official Docker website: [https://www.docker.com/get-started](https://www.docker.com/get-started). +# Install -Clone the Commune Repository: Open your terminal or command prompt and clone the Commune repository from GitHub: +### Setting Up With setup.py -```bash -git clone https://github.com/commune-ai/commune.git -``` +Install setuptools: +If you haven't already installed setuptools, you can do so using pip: ```bash -make up +pip install -r requirements; pip install -e . ``` -To enter the docker container do +or ```bash -make enter +./start.sh ``` -To exit the container - -```bash -exit -``` +### Setting Up Commune With Docker +Install Docker: If you don't have Docker installed on your system, download and install it from the official Docker website: [https://www.docker.com/get-started](https://www.docker.com/get-started). -Sync Commune with the Network: Inside the Docker container, run the following command to sync Commune with the network: +Clone the Commune Repository: Open your terminal or command prompt and clone the Commune repository from GitHub: ```bash -c sync +make up ``` +or + +```bash +docker-compose build +``` -Congratulations! Commune is now set up and running inside a Docker container. - -### Setting up Commune Without Docker -Clone the Commune Repository: Open your terminal or command prompt and clone the Commune repository from GitHub: +Start Commune: Once the Docker container is built, start Commune by running the following command: ```bash -git clone https://github.com/commune-ai/commune.git +make start +``` +or +```bash +docker-compose up -d # -d for detached mode ``` - -Install Dependencies: Navigate to the cloned Commune repository and install the required dependencies: +To enter the docker container do, and do the following ```bash -cd commune -pip install -e ./ +make enter +``` +or +```bash +docker exec -it commune bash ``` -# ENSURE YOU HAVE AN NPM ENVIRONMENT FOR PM2 +Then run the following command to sync the network ```bash -chmod +x ./scripts/* -sudo ./scripts/install_npm_env.sh -npm install -g pm2 +c ls ``` -Congratulations! 
Commune is now set up and running wihtou Docker -## Deploy Your Object From Anywhere -Commune allows developers to deploy, connect, and compose Python objects. The vision of Commune is to create an open ecosystem of Python objects that can serve as APIs for others. Commune provides additional tools through its `Module` object, which seamlessly integrates with any Python class. This means that you do not have to fundamentally change your code when making it public. -To deploy your model as a public server, you can launch it using the following code: -```python -# Give it a name; this will infer the IP and port -MyModel.launch(name='my_model') -# You can also give custom kwargs and args -MyModel.launch(name='my_model::2', kwargs={}, args={}) +To exit the container -# Don't like __init__? Start the module from a class method instead -MyModel.launch(name='my_model::2', fn='load_from_name', kwargs={'name': 'model_3'}) +```bash +exit ``` -## Module Namespaces +Sync Commune with the Network: Inside the Docker container, run the following command to sync Commune with the network: -A module namespace allows you to connect and reference your modules by the name you give them. +```bash +c sync +``` -## Connecting to a Module +Congratulations! Commune is now set up and running inside a Docker container. +Congratulations! Commune is now set up and running without Docker -To connect with a module, you can do it as follows. This creates a client that replicates the module as if it were running locally. +## Note: -```python -my_model = commune.connect('my_model') -# Supports both kwargs and args, though we recommend kwargs for clarity -my_model.forward(input='...') -``` +This repo is on the cutting edge of experimentation, so there may be some hiccups along the way. If you're primarily interested in using the core features of the protocol (such as intuitive cli) or seeking a more lightweight implementation, consider installing the [Communex](https://github.com/agicommies/communex) package. -You can also get more information about the module using the `info` function, which is a function from `commune.Module` that wraps over your Python class. +# Key Features -```python -# Get module info -model_info = my_model.info() -``` +## Module Filesystem -You can also get the functions and their schemas: +The `module.py` file serves as an anchor, organizing future modules in what we call a module filesystem. The module filesystem is a mix of the commune's core modules with your local modules with respect to your PWD (parent working directory). This allows you to create a module in your local directory and have it be recognized by the commune. For instance if I have a file called `model.py` in my local directory, with the following code: ```python -# Get functions (List[str]) -my_model.functions() -# Get function schema -my_model.function_schema() -``` +import commune as c +class Example(c.Module): + def __init__(self): + pass -### Module Filesystem + def predict(self, x): + return x + 1 +``` -The `module.py` file serves as an anchor, organizing future modules in what we call a module filesystem. For example, you can store a dataset module in `{PWD}/dataset/text`, which will have a path of `dataset.text`. The current limitation is to have a config where the name of the config is that of the Python object. 
I can call it from the commune cli by running the following command:

-Example:
```bash
-model/text/ # model folder (model.text)
-    text_model.py # python script for text model
-    text_model.yaml # config for module
+c model/predict x=1
```
-
-You can get this using the path (`model.text`):
+or
```python
-# Get the model class
-model_class = commune.module('model.text')
-
-# You can use it locally, obviously
-model = model_class()
-
-# Or you can deploy it as a server
-model_class.launch(name='model.text')
+c.call('model/predict', x=1)
```
-[Insert image of module filesystem]
-# Subspace
+## Subspace
+
+![Example](https://drive.google.com/uc?export=view&id=1ZqCK-rBKF2p8KFr5DvuFcJaPXdMcISlT)

Subspace is a blockchain that Commune uses for several purposes:

- **DNS for Python**: Decentralized Name Service for deployed objects.
- **Evaluating Performance through Voting**: Stake-weighted voting system for users to evaluate each other instead of self-reported networks. This provides users with
-
-## Register
+## Register On The Chain

To register a module, do the following

@@ -162,8 +142,7 @@
c register {module_path} name={module_name (OPTIONAL)}
```
-The module path is specified
-
+The module path is specified

Yo, listen up! I'm about to drop some updated knowledge on how to create a dope module and register it on the blockchain. Here's the revised step-by-step guide:

@@ -204,7 +183,22 @@ If you prefer to use the default module path as the name, simply omit the `name`
c register my_module_path tag=1
```
-That's it, my friend! You've created a dope module and registered it on the blockchain with the option to customize the name and tag. Now you can share your module with the world and let others benefit from your greatness. Keep on coding and stay fresh!
+# Development FAQ
+
+- Where can I find further documentation? In this repository's [docs](https://github.com/commune-ai/commune/tree/main/docs) folder.
+- Can I install on Windows? Yes, see this [guide](https://github.com/OmnipotentLabs/communeaisetup).
+- Can I contribute? Absolutely! We are open to all contributions. Please feel free to submit a pull request.
+## Testing
+We use pytest for testing. 
To run the tests, simply run the following command: + +Make sure you install pytest +```bash +pip install pytest +``` + +```bash +pytest commune +``` \ No newline at end of file diff --git a/api.py b/api.py new file mode 100644 index 000000000..45d3e630a --- /dev/null +++ b/api.py @@ -0,0 +1,254 @@ +import commune as c +from flask import Flask, jsonify, request +import random +from datetime import datetime, timedelta +import pandas as pd +import numpy as np +from flask_cors import CORS + +app = Flask(__name__) + + +CORS(app, origins='*') + +# Function to generate dummy data for ID, email, and date +def generate_dummy_data(): + unique_id = '#' + ''.join(random.choices('0123456789', k=7)) + + email = ''.join(random.choices('abcdefghijklmnopqrstuvwxyz', k=10)) + '@example.com' + + start_date = datetime.now() - timedelta(days=30) + end_date = datetime.now() + random_date = start_date + (end_date - start_date) * random.random() + formatted_date = random_date.strftime('%d %b, %Y') + + return unique_id, email, formatted_date + +# Endpoint to get the list of modules +@app.route('/modules', methods=['GET']) +def get_modules(): + modules = c.modules() + + # Generate dummy data for each module + formatted_modules = [] + for module in modules: + unique_id, email, formatted_date = generate_dummy_data() + formatted_module = { + "checkbox": "", + "id": unique_id, + "Name": module, + "Email": email, + "Date": formatted_date, + "status": "completed", + "trash": "" + } + formatted_modules.append(formatted_module) + + return jsonify(formatted_modules) + +# Endpoint to get metadata for a particular module +@app.route('/modules/metadata', methods=['GET']) +def get_module_metadata(): + # Retrieve module name from query parameter + module_name = request.args.get('module_name') + + if not module_name: + return jsonify({'error': 'Module name not provided in query parameter'}), 400 + + + module_metadata = {} + + try: + module = c.module(module_name) + # module_metadata['code'] = module.code() + module_metadata['config'] = module.config() + module_metadata['functions'] = module.fns() + module_metadata['schema'] = module.schema() + except Exception as e: + return jsonify({'error': str(e)}), 404 + + return jsonify(module_metadata) + +@app.route('/modules/trees', methods=['GET']) +def list_modules(): + module_name = request.args.get('module_name', None) + module = c.module() + + try: + if module_name: + modules_tree = module.tree(module_name) + else: + modules_tree = module.tree() + return jsonify(modules_tree) + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@app.route('/modules/keys', methods=['GET']) +def module_keys(): + module_name = request.args.get('module_name', None) + + try: + if module_name: + keys = c.module(module_name).keys() + else: + keys = c.keys() + return jsonify(keys) + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@app.route('/modules/active_thread_count', methods=['GET']) +def module_active_thread_count(): + module_name = request.args.get('module_name') + if not module_name: + return jsonify({'error': 'Module name not provided in query parameter'}), 400 + + try: + active_thread_count = c.module(module_name).active_thread_count() + return jsonify({"active_thread_count": active_thread_count}) + except Exception as e: + return jsonify({'error': str(e)}), 404 + + +@app.route('/modules/users', methods=['GET']) +def module_users(): # may need to be checked + module_name = request.args.get('module_name') + if not module_name: + return jsonify({'error': 'Module 
name not provided in query parameter'}), 400 + + try: + users = c.module(module_name).users() + return jsonify(users) + except Exception as e: + return jsonify({'error': str(e)}), 404 + +@app.route('/modules/namespaces', methods=['GET']) +def namespaces(): + module_name = request.args.get('module_name', None) + module = c.module() + try: + if module_name: + namespaces_list = module.namespace(module_name) + else: + namespaces_list = module.namespace() + return jsonify(namespaces_list) + except Exception as e: + return jsonify({'error': str(e)}), 500 + +# returns list of modules being ACTIVELY served +@app.route('/modules/servers', methods=['GET']) +def list_servers(): + module_name = request.args.get('module_name') + + try: + if module_name: + served_modules = c.servers(module_name) + else: + served_modules = c.servers() + return jsonify(served_modules) + except Exception as e: + return jsonify({'error': str(e)}), 500 + + +@app.route('/modules/info', methods=['GET']) +def get_module_info(): + module_name = request.args.get('module_name') + + if not module_name: + return jsonify({'error': 'Module name not provided in query parameter'}), 400 + + try: + module = c.module(module_name) # Adjusted this line + module_info = module.info({}) + return jsonify(module_info) + except Exception as e: + return jsonify({'error': str(e)}), 404 + + +@app.route('/modules/stats', methods=['GET']) +def module_stats(): + module_name = request.args.get('module_name') + module = c.module() # Assuming c.module() is defined elsewhere and works correctly. + + if not module_name: + return jsonify({'error': 'Module name not provided in query parameter'}), 400 + + try: + if module_name: + modules_tree = module.stats(module_name) + else: + modules_tree = module.stats() + + # Check if the result is a DataFrame and convert + if isinstance(modules_tree, pd.DataFrame): + # Convert DataFrame to a dictionary in a JSON-friendly format + modules_tree = modules_tree.to_dict(orient='records') # 'records' format means list of dicts + + return jsonify(modules_tree) + except Exception as e: + return jsonify({'error': str(e)}), 500 + +@app.route('/info', methods=['GET']) +def infos(): + module_name = request.args.get('module_name') + module = c.module() + infos = {} + if not module_name: + infos['total_module'] = len(c.modules()) + infos['total_user'] = random.randrange(start=1,stop=1000) + infos['module_pending'] = random.randrange(start=1,stop=100) + infos['review_request'] = random.randrange(start=1,stop=100) + + return jsonify(infos) + else : + return jsonify({"error":""}), 500 + +@app.route('/modules/history', methods=['GET']) +def history(): + module_name = request.args.get('module_name') + history_array = np.random.randint(60,101, size=7) + + + if not module_name: + history = {'history': history_array.tolist()} + + return jsonify(history) + else : + return jsonify({"error":""}), 500 + +@app.route('/servers/analytics', methods=['GET']) +def server_analytics(): + module_name = request.args.get('module_name') + history_array = np.random.randint(60,101, size=7) + servers = c.servers() + if not module_name: + server_anal = {'history': history_array.tolist()} + + return jsonify(server_anal) + else : + return jsonify({"error":""}), 500 + + +# TODO: add routes to consume PM2 data + + +if __name__ == '__main__': + app.run(host='0.0.0.0', port=5000, debug=True) + + +# c tree # list of modules with their location on the filesystem + +# c modules.fintest info # info abt a module + +# c modules.fintest keys # keys associated with a module + 
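+# Example HTTP usage (rough sketch, assuming the Flask app above is running locally on port 5000;
+# the module name is hypothetical):
+#   import requests
+#   requests.get('http://0.0.0.0:5000/modules').json()                                   # same rows as /modules above
+#   requests.get('http://0.0.0.0:5000/modules/info', params={'module_name': 'model'}).json()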
+# c modules.fintest stats # stats abt a module + +# c modules.fintest users + +# c modules.fintest active_thread_count + +# c namespace model.openai.free or c namespace + +# c servers & TODO: c server history (c servers gives list of modules being actively served) \ No newline at end of file diff --git a/bin/c b/bin/c deleted file mode 100644 index b48065ad0..000000000 --- a/bin/c +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python3 -import sys -import commune - -if __name__ == '__main__': - commune.cli(sys.argv[1:]) - - \ No newline at end of file diff --git a/commune.sh b/commune.sh deleted file mode 100755 index 8f79f6e21..000000000 --- a/commune.sh +++ /dev/null @@ -1,161 +0,0 @@ -#!/usr/bin/env bash -# -# Copyright (c) 2020 Ocean Protocol contributors -# SPDX-License-Identifier: Apache-2.0 -# -# Usage: ./start_ocean.sh -# -export DOCKER_BUILDKIT=0 -export COMPOSE_DOCKER_CLI_BUILD=0 - - -# Specify the ethereum default RPC container provider -export RAY_PORT="6378" -export NETWORK_RPC_HOST="172.15.0.3" -export NETWORK_RPC_PORT="8545" -export NETWORK_RPC_URL="http://"${NETWORK_RPC_HOST}:${NETWORK_RPC_PORT} - -export GANACHE_PORT="8545" -export GANACHE_HOST="172.15.0.3" -export GANACHE_URL="http://"${GANACHE_HOST}:${GANACHE_PORT} - -# export NETWORK_RPC_URL='https://polygon-mumbai.g.alchemy.com/v2/YtTw29fEGWDXcMKpljSM63DbOrgXgJRx' -# Use this seed on ganache to always create the same wallets -export GANACHE_MNEMONIC=${GANACHE_MNEMONIC:-"taxi music thumb unique chat sand crew more leg another off lamp"} - -# WEB INFURA STUFF -export WEB3_INFURA_PROJECT_ID="4b1e6d019d6644de887db1255319eff8" -export WEB3_INFURA_URL=" https://mainnet.infura.io/v3/${WEB3_INFURA_PROJECT_ID}" - -# ALCHEMY STUFF -export WEB3_ALCHEMY_PROJECT_ID="RrtpZjiUVoViiDEaYxhN9o6m1CSIZvlL" -export WEB3_ALCHEMY_URL="https://eth-mainnet.g.alchemy.com/v2/${WEB3_INFURA_PROJECT_ID}" -# Ocean contracts - -export PRIVATE_KEY="0x8467415bb2ba7c91084d932276214b11a3dd9bdb2930fefa194b666dd8020b99" - - -IP="localhost" -optspec=":-:" -set -e - -# Patch $DIR if spaces -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" -DIR="${DIR/ /\\ }" - -# Default versions of Aquarius, Provider -export COMPOSE_FILES="" -export PROJECT_NAME="commune" -export FORCEPULL="false" -export FORCEBUILD="false" - -# Export User UID and GID -export LOCAL_USER_ID=$(id -u) -export LOCAL_GROUP_ID=$(id -g) - - -# colors -COLOR_R="\033[0;31m" # red -COLOR_G="\033[0;32m" # green -COLOR_Y="\033[0;33m" # yellow -COLOR_B="\033[0;34m" # blue -COLOR_M="\033[0;35m" # magenta -COLOR_C="\033[0;36m" # cyan - -# reset -COLOR_RESET="\033[00m" - -# These paths are used in the docker-compose files -export SUBSPACE_PATH="./subspace" -export GANACHE_PATH="./ganache" -export FRONTEND_PATH="./frontend" -export COMMUNE_PATH="./" -export IPFS_PATH="./ipfs" - - -while :; do - case $1 in - - --ganache) - COMPOSE_FILES+=" -f ganache/docker-compose.yml" - - ;; - - --commune) - COMPOSE_FILES+=" -f ./docker-compose.yml" - - ;; - # BLOCKCHAIN NODES - --subspace) - - COMPOSE_FILES+=" -f subspace/docker-compose.yml" - - ;; - - --ipfs) - COMPOSE_FILES+=" -f ./ipfs/docker-compose.yml" - - ;; - - --frontend) - COMPOSE_FILES+=" -f ./frontend/docker-compose.yml" - - ;; - - --all) - COMPOSE_FILES="" - COMPOSE_FILES+=" -f ./docker-compose.yml" - # COMPOSE_FILES+=" -f ipfs/docker-compose.yml" - COMPOSE_FILES+=" -f ganache/docker-compose.yml" - COMPOSE_FILES+=" -f subspace/docker-compose.yml" - COMPOSE_FILES+=" -f frontend/docker-compose.yml" - ;; - - --light) - COMPOSE_FILES+=" -f 
./docker-compose.yml" - ;; - - --pull) - FORCEPULL="true" - - ;; - - --build) - FORCEBUILD="true" - - ;; - - --down) - printf $COLOR_R'Doing a deep clean ...\n\n'$COLOR_RESET - # eval docker network rm ${PROJECT_NAME}_default || true; - eval docker-compose --project-name=$PROJECT_NAME "$COMPOSE_FILES" down; - break; - ;; - - - --restart) - printf $COLOR_R'Doing a deep clean ...\n\n'$COLOR_RESET - eval docker-compose --project-name=$PROJECT_NAME "$COMPOSE_FILES" down; - eval docker-compose "$DOCKER_COMPOSE_EXTRA_OPTS" --project-name=$PROJECT_NAME "$COMPOSE_FILES" up -d - break - ;; - - --) # End of all options. - shift - break - ;; - -?*) - printf $COLOR_R'WARN: Unknown option (ignored): %s\n'$COLOR_RESET "$1" >&2 - break - ;; - *) - [ ${FORCEPULL} = "true" ] && eval docker-compose "$DOCKER_COMPOSE_EXTRA_OPTS" --project-name=$PROJECT_NAME "$COMPOSE_FILES" pull - [ ${FORCEBUILD} = "true" ] && eval docker-compose "$DOCKER_COMPOSE_EXTRA_OPTS" --project-name=$PROJECT_NAME "$COMPOSE_FILES" build - eval docker-compose "$DOCKER_COMPOSE_EXTRA_OPTS" --project-name=$PROJECT_NAME "$COMPOSE_FILES" up -d - break - esac - shift -done - - - diff --git a/commune/.dockerignore b/commune/.dockerignore new file mode 100644 index 000000000..41da0ad48 --- /dev/null +++ b/commune/.dockerignore @@ -0,0 +1 @@ +./data diff --git a/commune/.gitignore b/commune/.gitignore new file mode 100644 index 000000000..50e0f0b49 --- /dev/null +++ b/commune/.gitignore @@ -0,0 +1,324 @@ +# Byte-compiled / optimized / DLL files + +**/__pycache__ +**/*/__pycache__ +*.py[cod] +*$py.class +*.pyc +env/ + +# benchmark results +benchmarks/results/ +benchmarks/results/* +!benchmarks/results + +*.csv +*.torch +*.pt +*.log + +# runs/data/models/logs/~ +data/** +commune/data/ + +# C extensions +*.so + +# IDE +*.idea/ + +# VSCODE +.vscode/ + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ +# Generated by Cargo +# will have compiled files and executables +**/target/ +# These are backup files generated by rustfmt +**/*.rs.bk + +.DS_Store + +# The cache for docker container dependency +.cargo + +# The cache for chain data in container +.local + +!data/.gitkeep + +# misc +.DS_Store +.env.local +.env.development.local +.env.test.local +.env.production.local + +# PIPY Stuff +bittensor.egg-info +bittensor*.egg +bdist.* + +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +**/build/* +**/dist/* +**/runs/* +**/env/* +**/tmp/* + +**/.bash_history +**/*.xml +**/*.pstats +**/*.png + +# Replicate library +**/.replicate +replicate.yaml +**/run.sh +# Byte-compiled / optimized / DLL files +**/__pycache__/ +*.py[cod] +*$py.class +*.pyc + +# weigths and biases +wandb/ + +# benchmark results +benchmarks/results/ +benchmarks/results/* +!benchmarks/results + +*.csv +*.torch +*.pt +*.log + + +# C extensions +*.so + +# IDE +*.idea/ + +# VSCODE +.vscode/ + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + + + +# PyBuilder +.pybuilder/ + +# Jupyter Notebook +.ipynb_checkpoints + + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Scraping +images +logs +websites.json + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Vector Store +embeddings.db + diff --git a/commune/__init__.py b/commune/__init__.py index 78f07d287..7e371fdfe 100755 --- a/commune/__init__.py +++ b/commune/__init__.py @@ -1,19 +1,19 @@ - from .module import Module -# call it whatever you want, but it's the same thing -Block = Lego = Module -from .module.config import Config -from .modules.cli import cli +c = Block = Lego = M = Module # alias c.Module as c.Block, c.Lego, c.M +from .cli import cli +# import sys +# sys.path += [c.pwd()] + +# from .modules.subspace import subspace # from .model import Model -config = Config -import warnings -warnings.filterwarnings("ignore") # set the module functions as globals for k,v in Module.__dict__.items(): globals()[k] = v - -for f in Module.get_class_methods() + Module.get_static_methods(): +for f in Module.class_functions() + Module.static_functions(): globals()[f] = getattr(Module, f) - +for f in Module.self_functions(): + globals()[f] = lambda *args, **kwargs: getattr(Module(), f)(*args, **kwargs) +globals()['cli'] = cli + diff --git a/commune/api.py b/commune/api.py deleted file mode 100644 index 107487be9..000000000 --- a/commune/api.py +++ /dev/null @@ -1,58 +0,0 @@ - - -import commune - - -class API(commune.Module): - def __init__(self, module:str = 'module', refresh=False): - - self.build(refresh=refresh) - self.module = commune.connect('module') - self.merge(self.module) - - @classmethod - def deploy_api(cls, refresh:bool = True): - commune.launch(name='module', mode='ray', refresh=refresh) - - @classmethod - def serve_api(cls): - return commune.get_actor('module').serve(ray_get=False) - - @classmethod - def build(cls, refresh:bool=True): - if not cls.actor_exists('module') or refresh: - commune.cmd('python3 commune/api.py -fn deploy_api -args "[\'True\']"') - - module = commune.get_actor('module') - if not module.server_running(): - module.serve(ray_get=False) - import time - time_elapsed = 0 - while True: - time.sleep(1) - time_elapsed += 1 - server_running = module.server_running() - commune.print(f'Is Server Running {server_running} Time Elapsed: {time_elapsed}', color='yellow') - if server_running: - break - - commune.print(f'COMMUNE IS SERVED {server_running}', color='green') - - - - @classmethod - def run(cls): - args = cls.argparse() - - if args.function in ['deploy_api', 'serve_api', 'build']: - obj = cls - else: - obj = cls().module - - output =getattr(obj, args.function)(*args.args, **args.kwargs) - commune.print(output, 'green') - - -if __name__ == "__main__": - API.run() - diff --git a/commune/api/api.py b/commune/api/api.py new file mode 100644 index 000000000..2cce0bc69 --- /dev/null +++ b/commune/api/api.py @@ -0,0 +1,27 @@ +import commune as c + +class Api(c.Module): + def __init__(self, module=None): + self.module = c.module(module) + + def get_api_key(self, module=None): + if module != None: + cls = c.module(module) + api_keys = cls.api_keys() + if len(api_keys) == 0: + return None + else: + return c.choice(api_keys) + + def api_keys(self): + return c.get(self.module.resolve_path('api_keys'), []) + + def rm_api_keys(self): + c.put(self.resolve_path('api_keys'), []) + return {'api_keys': []} + + def send_api_keys(self, module:str, network='local'): + api_keys = self.api_keys() + assert len(api_keys) > 0, 'no api keys to send' + module = c.connect(module, network=network) + return 
module.add_api_keys(api_keys)
diff --git a/commune/app/app.py b/commune/app/app.py
new file mode 100644
index 000000000..29d388398
--- /dev/null
+++ b/commune/app/app.py
@@ -0,0 +1,76 @@
+import commune as c
+import os
+import json
+
+
+class App(c.Module):
+
+    def start(self,
+              module:str = 'app',
+              fn='app',
+              port=8501,
+              public:bool = False,
+              remote:bool = False,
+              kwargs=None):
+        kwargs = kwargs or {}
+        if public:
+            port = c.free_port()
+
+        while c.port_used(port):
+            c.print(f'Port {port} is already in use', color='red')
+            port = port + 1
+        if remote:
+            remote_kwargs = c.locals2kwargs(locals())
+            remote_kwargs['remote'] = False
+            c.remote_fn(module=module, fn='st', kwargs=remote_kwargs)
+            ip = c.ip()
+            url = f'http://{ip}:{port}'
+
+            return {'success': True,
+                    'msg': f'running {module} on {port}',
+                    'url': url}
+        module = c.module(module)
+        module_filepath = module.filepath()
+        # add port to the command
+        cmd = f'streamlit run {module_filepath} --server.port {port}'
+
+        if kwargs == None:
+            kwargs = {}
+
+        kwargs_str = json.dumps(kwargs)
+        kwargs_str = kwargs_str.replace('"', "'")
+
+        cmd += f' -- --fn {fn} --kwargs "{kwargs_str}"'
+
+        module2dashboard = self.get('module2dashboard', {})
+        if module in module2dashboard:
+            try:
+                module_port = module2dashboard[module]['port']
+                c.kill_port(module_port)
+            except Exception as e:
+                c.print(f'Error: {e}', color='red')
+        path = module.path()
+        module2dashboard[path] = {
+            'port': port,
+            'fn': fn,
+            'kwargs': kwargs,
+            'cmd': cmd
+        }
+        self.put('module2dashboard', module2dashboard)
+        cwd = os.path.dirname(module_filepath)
+        return c.cmd(cmd, verbose=True, cwd=cwd)
+
+
+
+    def module2dashboard(self):
+        return self.get('module2dashboard', {})
+
+
+    def app(self):
+        import streamlit as st
+        st.write('Hello World!')
+
+
+App.run(__name__)
+
+
diff --git a/commune/app/login/login.py b/commune/app/login/login.py
new file mode 100644
index 000000000..6907a7029
--- /dev/null
+++ b/commune/app/login/login.py
@@ -0,0 +1,22 @@
+import commune as c
+import streamlit as st
+
+class Login(c.Module):
+    def __init__(self):
+        self.set_config(locals())
+
+    def passwords(self):
+        return self.get('allowed_password', [])
+
+    def add_password(self, password):
+        passwords = self.passwords()
+        passwords.append(str(password))
+        self.put('allowed_password', passwords)
+
+    def app(self, x:int = 1, y:int = 2) -> int:
+        password = st.text_input('Password', '123456', type='password')
+        self.key = c.module('key').from_password(c.hash(password))
+        st.write(self.key.ss58_address)
+
+
+Login.run(__name__)
\ No newline at end of file
diff --git a/commune/cli.py b/commune/cli.py
new file mode 100644
index 000000000..720f13584
--- /dev/null
+++ b/commune/cli.py
@@ -0,0 +1,169 @@
+
+import commune as c
+from munch import Munch
+
+class cli(c.Module):
+    """
+    Command line interface that maps `c <module>/<fn> *args key=value ...` onto module functions.
+    """
+    #
+
+    def __init__(self,
+                 args = None,
+                 module = 'module',
+                 verbose = True,
+                 save: bool = True):
+        self.base_module = c.module(module)
+        args = args or self.argv()
+        output = self.get_output(args)
+
+        if c.is_generator(output):
+            for output_item in output:
+                if isinstance(output_item, Munch):
+                    output_item = output_item.toDict()
+                c.print(output_item, verbose=verbose)
+        else:
+            if isinstance(output, Munch):
+                output = output.toDict()
+            c.print(output, verbose=verbose)
+
+        if save and c.jsonable(output):
+            self.history_module().add({'input': 'c ' + ' '.join(args), 'output': output})
+
+    def get_output(self, args):
+
+        args, kwargs = 
self.parse_args(args) + + + base_module_attributes = list(set(self.base_module.functions() + self.base_module.get_attributes())) + # is it a fucntion, assume it is for the module + # handle module/function + is_fn = args[0] in base_module_attributes + + + if '/' in args[0]: + args = args[0].split('/') + args[1:] + is_fn = False + + if is_fn: + # is a function + module = self.base_module + fn = args.pop(0) + else: + module = args.pop(0) + if isinstance(module, str): + module = c.module(module) + fn = args.pop(0) + + + if module.classify_fn(fn) == 'self': + module = module() + + fn_obj = getattr(module, fn) + + if callable(fn_obj): + output = fn_obj(*args, **kwargs) + elif c.is_property(fn_obj): + output = getattr(module(), fn) + else: + output = fn_obj + if callable(fn): + output = fn(*args, **kwargs) + + return output + + + + @classmethod + def parse_args(cls, argv = None): + if argv is None: + argv = cls.argv() + + args = [] + kwargs = {} + parsing_kwargs = False + for arg in argv: + # TODO fix exception with "=" + # if any([arg.startswith(_) for _ in ['"', "'"]]): + # assert parsing_kwargs is False, 'Cannot mix positional and keyword arguments' + # args.append(cls.determine_type(arg)) + if '=' in arg: + parsing_kwargs = True + key, value = arg.split('=', 1) + # use determine_type to convert the value to its actual type + kwargs[key] = cls.determine_type(value) + else: + assert parsing_kwargs is False, 'Cannot mix positional and keyword arguments' + args.append(cls.determine_type(arg)) + + return args, kwargs + + @classmethod + def determine_type(cls, x): + + if x.startswith('py(') and x.endswith(')'): + try: + return eval(x[3:-1]) + except: + return x + if x.lower() in 'null' or x == 'None': # convert 'null' or 'None' to None + return None + elif x.lower() in ['true', 'false']: # convert 'true' or 'false' to bool + return bool(x.lower() == 'true') + elif x.startswith('[') and x.endswith(']'): # this is a list + try: + list_items = x[1:-1].split(',') + # try to convert each item to its actual type + x = [cls.determine_type(item.strip()) for item in list_items] + if len(x) == 1 and x[0] == '': + x = [] + return x + + except: + # if conversion fails, return as string + return x + elif x.startswith('{') and x.endswith('}'): + # this is a dictionary + if len(x) == 2: + return {} + try: + dict_items = x[1:-1].split(',') + # try to convert each item to a key-value pair + return {key.strip(): cls.determine_type(value.strip()) for key, value in [item.split(':', 1) for item in dict_items]} + except: + # if conversion fails, return as string + return x + else: + # try to convert to int or float, otherwise return as string + try: + return int(x) + except ValueError: + try: + return float(x) + except ValueError: + return x + + + @classmethod + def history_module(cls, path='history'): + return c.m('history')(folder_path=cls.resolve_path(path)) + + @classmethod + def history(cls,**kwargs): + history = cls.history_module().history(**kwargs) + return history + + @classmethod + def rm_history(cls,*args, **kwargs): + history = cls.history_module().rm_history(*args, **kwargs) + return history + + + @classmethod + def history_paths(cls, **kwargs): + history = cls.history_module().history_paths(**kwargs) + return history + +def main(): + import sys + cli() diff --git a/commune/client/client.py b/commune/client/client.py new file mode 100644 index 000000000..c06f5e34b --- /dev/null +++ b/commune/client/client.py @@ -0,0 +1,374 @@ + + +from typing import * +import asyncio +import commune as c +import aiohttp 
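+# Example usage (rough sketch; assumes a commune module is being served locally and that a key named
+# 'module' exists — both names are illustrative):
+#   client = Client(address='0.0.0.0:8000', key='module')
+#   client.forward('info')        # signs the request and POSTs it to http://0.0.0.0:8000/info/
+#   Client.call('module/info')    # classmethod that resolves the address from the local namespace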
+import json + +STREAM_PREFIX = 'data: ' +BYTES_PER_MB = 1e6 + +class Client(c.Module): + count = 0 + def __init__( + self, + address : str = '0.0.0.0:8000', + network: bool = 'local', + key : str = None, + save_history: bool = True, + history_path : str = 'history', + loop: 'asyncio.EventLoop' = None, + debug: bool = False, + serializer= 'serializer', + default_fn = 'info', + + **kwargs + ): + self.loop = c.get_event_loop() if loop == None else loop + + self.set_client(address = address, network=network) + self.serializer = c.module(serializer)() + self.key = c.get_key(key) + self.start_timestamp = c.timestamp() + self.save_history = save_history + self.history_path = history_path + self.debug = debug + self.default_fn = default_fn + + def prepare_request(self, args: list = None, kwargs: dict = None, params=None, message_type = "v0"): + + if isinstance(args, dict): + kwargs = args + args = None + + if params != None: + assert type(params) in [list, dict], f'params must be a list or dict, not {type(params)}' + if isinstance(params, list): + args = params + elif isinstance(params, dict): + kwargs = params + kwargs = kwargs or {} + args = args if args else [] + kwargs = kwargs if kwargs else {} + + # serialize this into a json string + if message_type == "v0": + """ + { + 'data' : { + 'args': args, + 'kwargs': kwargs, + 'timestamp': timestamp, + } + 'signature': signature + } + + """ + + input = { + "args": args, + "kwargs": kwargs, + "timestamp": c.timestamp(), + } + request = self.serializer.serialize(input) + request = self.key.sign(request, return_json=True) + # key emoji + elif message_type == "v1": + + inputs = {'params': kwargs, + 'ticket': self.key.ticket() } + if len(args) > 0: + inputs['args'] = args + request = self.serializer.serialize(input) + else: + raise ValueError(f"Invalid message_type: {message_type}") + + return request + + + async def send_request(self, url:str, request: dict, headers=None, timeout:int=10, verbose=False): + # start a client session and send the request + + if not url.startswith('http'): + url = 'http://' + url + + c.print(f"🛰️ Call {url} 🛰️ (🔑{self.key.ss58_address})", color='green', verbose=verbose) + + async with aiohttp.ClientSession() as session: + async with session.post(url, json=request, headers=headers) as response: + + if response.content_type == 'application/json': + result = await asyncio.wait_for(response.json(), timeout=timeout) + + elif response.content_type == 'text/plain': + result = await asyncio.wait_for(response.text(), timeout=timeout) + + elif response.content_type == 'text/event-stream': + if self.debug: + progress_bar = c.tqdm(desc='MB per Second', position=0) + result = {} + async for line in response.content: + + event_data = line.decode('utf-8') + event_bytes = len(event_data) + + if self.debug : + progress_bar.update(event_bytes/(BYTES_PER_MB)) + + # remove the "data: " prefix + if event_data.startswith(STREAM_PREFIX): + event_data = event_data[len(STREAM_PREFIX):] + + event_data = event_data.strip() + + # skip empty lines + if event_data == "": + continue + + # if the data is formatted as a json string, load it {data: ...} + if isinstance(event_data, bytes): + event_data = event_data.decode('utf-8') + + # if the data is formatted as a json string, load it {data: ...} + if isinstance(event_data, str): + if event_data.startswith('{') and event_data.endswith('}') and 'data' in event_data: + event_data = json.loads(event_data)['data'] + result += [event_data] + + # process the result if its a json string + if 
result.startswith('{') and result.endswith('}') or \ + result.startswith('[') and result.endswith(']'): + result = ''.join(result) + result = json.loads(result) + else: + raise ValueError(f"Invalid response content type: {response.content_type}") + if type(result) in [str, dict]: + result = self.serializer.deserialize(result) + if isinstance(result, dict) and 'data' in result: + result = result['data'] + + return result + + + def process_output(self, result): + ## handles + if isinstance(result, str): + result = json.loads(result) + if 'data' in result: + result = self.serializer.deserialize(result) + return result['data'] + else: + return result + + + def resolve_key(self,key=None): + if key == None: + key = self.key + if isinstance(key, str): + key = c.get_key(key) + return key + + def prepare_url(self, address, fn): + address = address or self.address + fn = fn or self.default_fn + if '/' in address.split('://')[-1]: + address = address.split('://')[-1] + url = f"{address}/{fn}/" + return url + + async def async_forward(self, + fn: str, + args: list = None, + kwargs: dict = None, + params: dict = None, + address : str = None, + timeout: int = 10, + headers : dict ={'Content-Type': 'application/json'}, + message_type = "v0", + key : str = None, + verbose = False, + **extra_kwargs + ): + key = self.resolve_key(key) + url = self.prepare_url(address, fn) + # resolve the kwargs at least + kwargs =kwargs or {} + kwargs.update(extra_kwargs) + request = self.prepare_request(args=args, kwargs=kwargs, params=params, message_type=message_type) + result = await self.send_request(url=url, request=request, headers=headers, timeout=timeout, verbose=verbose) + + if self.save_history: + input = self.serializer.deserialize(request) + path = self.history_path+ '/' + self.key.ss58_address + '/' + self.address+ '/'+ str(input['timestamp']) + output = { + 'address': address, + 'fn': fn, + 'input': input, + 'result': result, + 'latency': c.time() - input['timestamp'], + } + self.put(path, output) + return result + + + def age(self): + return self.start_timestamp - c.timestamp() + + def set_client(self, + address : str = None, + verbose: bool = 1, + network : str = 'local', + possible_modes = ['http', 'https'], + ): + # we dont want to load the namespace if we have the address + if not c.is_address(address): + module = address # we assume its a module name + assert module != None, 'module must be provided' + namespace = c.get_namespace(search=module, network=network) + if module in namespace: + address = namespace[module] + else: + address = module + if '://' in address: + mode = address.split('://')[0] + assert mode in possible_modes, f'Invalid mode {mode}' + address = address.split('://')[-1] + address = address.replace(c.ip(), '0.0.0.0') + self.address = address + return {'address': self.address} + + @classmethod + def history(cls, key=None, history_path='history'): + key = c.get_key(key) + return cls.ls(history_path + '/' + key.ss58_address) + + + def forward(self,*args,return_future:bool=False, timeout:str=4, **kwargs): + forward_future = asyncio.wait_for(self.async_forward(*args, **kwargs), timeout=timeout) + if return_future: + return forward_future + else: + return self.loop.run_until_complete(forward_future) + + + + @classmethod + def call(cls, module : str, + fn:str = None, + *args, + timeout : int = 10, + prefix_match:bool = False, + network:str = 'local', + key:str = None, + kwargs = None, + params = None, + **extra_kwargs) -> None: + + if '//' in module: + module = module.split('//')[-1] + if '/' 
in module: + # adjust the split + if fn != None: + args = [fn] + list(args) + module , fn = module.split('/') + + module = cls.connect(module, + network=network, + prefix_match=prefix_match, + virtual=False, + key=key) + # if isinstance(kwargs, str): + # kwargs = c.str2dict(kwargs) + if kwargs == None: + kwargs = {} + kwargs.update(extra_kwargs) + return asyncio.run(module.async_forward(fn=fn, + args=args, + kwargs=kwargs, + params=params)) + + + @classmethod + def call_search(cls, + search : str, + *args, + timeout : int = 10, + network:str = 'local', + key:str = None, + kwargs = None, + **extra_kwargs) -> None: + if '/' in search: + search, fn = search.split('/') + namespace = c.namespace(search=search, network=network) + future2module = {} + for module, address in namespace.items(): + c.print(f"Calling {module}/{fn}", color='green') + future = c.submit(cls.call, + args = [module, fn] + list(args), + kwargs = {'timeout': timeout, + 'network': network, 'key': key, + 'kwargs': kwargs, + **extra_kwargs} , timeout=timeout) + future2module[future] = module + futures = list(future2module.keys()) + result = {} + progress_bar = c.tqdm(len(futures)) + for future in c.as_completed(futures, timeout=timeout): + module = future2module.pop(future) + futures.remove(future) + progress_bar.update(1) + result[module] = future.result() + + return result + + + + __call__ = forward + + def __str__ ( self ): + return "Client({})".format(self.address) + def __repr__ ( self ): + return self.__str__() + def __exit__ ( self ): + self.__del__() + + def virtual(self): + from .virtual import VirtualClient + return VirtualClient(module = self) + + def __repr__(self) -> str: + return super().__repr__() + + + @classmethod + def connect(cls, + module:str, + network : str = 'local', + mode = 'http', + virtual:bool = True, + **kwargs): + + + + client = cls(address=module, + virtual=virtual, + network=network, + **kwargs) + # if virtual turn client into a virtual client, making it act like if the server was local + if virtual: + return client.virtual() + + return client + + + def test(self, module='module::test_client'): + c.serve(module) + c.sleep(1) + c.print(c.server_exists(module)) + c.print('Module started') + + info = c.call(module+'/info') + key = c.get_key(module) + assert info['ss58_address'] == key.ss58_address + return {'info': info, 'key': str(key)} diff --git a/commune/client/virtual.py b/commune/client/virtual.py new file mode 100644 index 000000000..0cbb41435 --- /dev/null +++ b/commune/client/virtual.py @@ -0,0 +1,41 @@ + +import commune as c +from functools import partial +import asyncio + + +class VirtualClient: + def __init__(self, module: str ='ReactAgentModule'): + if isinstance(module, str): + self.module_client = c.connect(module) + self.loop = self.module_client.loop + self.success = self.module_client.success + else: + self.module_client = module + + def remote_call(self, *args, return_future= False, timeout:int=10, **kwargs): + remote_fn = kwargs.pop('remote_fn') + future = asyncio.wait_for(self.module_client.async_forward(fn=remote_fn, args=args, kwargs=kwargs, timeout=timeout), timeout=timeout) + if return_future: + return future + else: + loop = asyncio.get_event_loop() + return loop.run_until_complete(future) + + def __str__(self): + return f'' + + def __repr__(self): + return self.__str__() + + protected_attributes = [ 'module_client', 'remote_call'] + def __getattr__(self, key): + + if key in self.protected_attributes : + return getattr(self, key) + else: + return lambda *args, **kwargs : 
self.remote_call( remote_fn=key, *args, **kwargs) + + + + diff --git a/commune/client/ws.py b/commune/client/ws.py new file mode 100644 index 000000000..3eb6dbf6b --- /dev/null +++ b/commune/client/ws.py @@ -0,0 +1,68 @@ +import asyncio +import websockets +import commune as c +import json + +class WSClient(c.Module): + + + def __init__(self, + address:str = '0.0.0.0:50087', + start:bool = True, + network: dict = None, + ): + if ':' in address: + ip, port = address.split(':') + self.ip = ip + self.port = port + namespace = c.namespace(network=network) + self.address = namespace.get(address, None) + + + def resolve_address(self, address=None): + if address == None: + address = self.address + if not 'ws://' in address: + address = f'ws://{address}' + assert isinstance(address, str), f'address must be a string, not {type(address)}' + return address + + def forward(self, data, address=None): + return + + async def async_forward(self, data='hello', address = None, **kwargs): + address = self.resolve_address(address=address, **kwargs) + async with websockets.connect(address) as websocket: + await websocket.send(data) + response = await websocket.recv() + return response + + def forward(self, + fn:str = 'fn', + args:list = [], + kwargs:dict = {}, + address:str = None, + timeout:int = 10, + **extra_kwargs): + + data = { + 'fn': fn, + 'args': args, + 'kwargs': kwargs, + **extra_kwargs + } + data = json.dumps(data) + loop = asyncio.get_event_loop() + future = self.async_forward(data=data, address=address) + future = asyncio.wait_for(future, timeout=timeout) + result = loop.run_until_complete(future) + return result + + + @staticmethod + async def recv(address): + chunks = [] + async with websockets.connect(address) as websocket: + chunk = await websocket.recv(address) + chunks.append(chunk) + return chunks diff --git a/commune/code/code.py b/commune/code/code.py new file mode 100644 index 000000000..96c49569f --- /dev/null +++ b/commune/code/code.py @@ -0,0 +1,188 @@ +import commune as c +import json +from typing import * + +class Coder(c.Module): + def comment(self, + fn='coder/call', + model = 'model.openai', + timeout=20, + **model_params): + ''' + ### Function Documentation + + #### `call(self, fn='coder/call', model='model.openai', timeout=20, **model_params)` + + This function is responsible for generating documentation for a given piece of code by utilizing a language model. + + Parameters: + - `fn` (str): The name of the function that needs documentation. Default value is `'coder/call'`. + - `model` (str): The identifier of the language model to be used. Default is `'model.openai'`. + - `timeout` (int): The maximum amount of time (in seconds) to wait for the model to generate the documentation. Default is `20`. + - `**model_params`: Arbitrary keyword arguments that will be passed to the `connect` method of the `c` object when connecting to the language model. + + Returns: + - `docs` (str): The generated documentation for the specified code. + + The function performs the following steps: + 1. Connects to the specified language model using the provided parameters. + 2. Constructs an input JSON object containing the instruction, code, and a placeholder for documentation. + 3. Requests the language model to generate documentation based on the provided input. + 4. Processes the generated documentation response. + 5. Adds the generated documentation to the function using the `c.add_docs()` method. + 6. Returns the generated documentation. 
+ + **Example Usage:** + + ```python + # assuming the 'c' object and 'call' method are part of a class + caller = YourClass() + documentation = caller.call( + fn='your_function_name', + model='your_model_identifier', + timeout=30, + model_params={'additional': 'parameters'} + ) + print(documentation) + ``` + + **Note:** + - The `c` object is assumed to be a pre-defined object with methods `connect`, `fn_code`, and `add_docs`. + - `self.process_response` is assumed to be a method that processes the generated documentation response. Its functionality is not detailed in the provided code. + ''' + model = c.connect(model, **model_params) + input = json.dumps({ + 'instruction': 'given the code, document the function in a professional manner in the docs section', + 'code': c.fn_code(fn), + 'docs': None, + }) + # get the docs + docs = model.generate(input, timeout=timeout) + docs = self.process_response(docs) + + # add docs to the function + c.add_docs(fn, docs) + + return docs + + call = document_fn = comment + + def document_module(self, + module='agent.coder', + fns = None, + model = 'model.openai', + **model_params + ): + fns = c.module(module).fns() + for fn in fns: + c.print(f'Documenting function {fn} in module {module}...') + + try: + future = c.submit(self.document_fn, dict(fn=module+'/'+fn, model=model, **model_params)) + future.result() + except: + c.print(f'Failed to document function {fn} in module {module}...') + print(f'Documenting function {fn} in module {module}...') + + return + + def process_response(self, response): + ''' + """ + Documentation for `process_response` function: + + This function is responsible for processing a given response and ensuring it's in a proper JSON format. If the response is in a string format, the function attempts to load it as a JSON object. If the loading fails, it simply passes without raising any exceptions. + + Parameters: + - self: The instance of the class that this method is bound to. + - response: A response object that is to be processed. It can be a string or already a + ''' + if isinstance(response, str): + try: + response = json.loads(response) + except: + pass + + return response + + + + def file2fns(self, filepath): + ''' + """ + Documentation for `get_fns` function: + + This function retrieves the list of functions available in a given module. + + Parameters: + - self: The instance of the class that this method is bound to. + - module: The name of the module for which the list of functions is to be retrieved. + + Returns: + - fns: A list of function names available in the specified module. 
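        Editor's note (added for clarity, not in the original diff): despite the
        module-oriented wording above, this implementation takes a file path,
        reads the file's text, and collects function names by scanning its
        source lines for 'def ' declarations.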
+ ''' + + if c.module_exists(filepath): + filepath = c.filepath() + if not filepath.endswith('.py'): + filepath = filepath + '.py' + code = c.get_text(filepath) + lines = code.split('\n') + fns = [] + for line in lines: + if '):' in line.strip() and 'def ' in line.split('):')[0].strip(): + fn = line.split('def ')[1].split('):')[0].split('(')[0] + if ' ' in fn or ']' in fn: + continue + fns.append(fn) + + + return fns + + + @property + def get_function_default_map(self, include_parents=False): + return self.get_function_default_map(obj=self, include_parents=False) + + @classmethod + def get_function_default_map(cls, obj:Any= None, include_parents=False) -> Dict[str, Dict[str, Any]]: + obj = obj if obj else cls + default_value_map = {} + function_signature = cls.fn_signature_map(obj=obj,include_parents=include_parents) + for fn_name, fn in function_signature.items(): + default_value_map[fn_name] = {} + if fn_name in ['self', 'cls']: + continue + for var_name, var in fn.items(): + if len(var.split('=')) == 1: + var_type = var + default_value_map[fn_name][var_name] = 'NA' + + elif len(var.split('=')) == 2: + var_value = var.split('=')[-1].strip() + default_value_map[fn_name][var_name] = eval(var_value) + + return default_value_map + + + + def file2file(self, path, **kwargs): + ''' + """ + Documentation for `file2file` function: + + This function reads the content of a file and writes it to another file. + + Parameters: + - self: The instance of the class that this method is bound to. + - path: The path to the file to be read. + - new_path: The path to the file to be written. If not provided, the content is written to the same file. + + Returns: + - success: A boolean value indicating whether the operation was successful. + ''' + content = c.get_text(path) + content = self.model.forward(content, **kwargs) + c.put_text(path, content) + return content + \ No newline at end of file diff --git a/commune/crypto/crypto.py b/commune/crypto/crypto.py new file mode 100644 index 000000000..58455a036 --- /dev/null +++ b/commune/crypto/crypto.py @@ -0,0 +1,46 @@ +import commune as c +import hashlib + +class Hash(c.Module): + @classmethod + def hash(cls, x, mode: str='sha256',*args,**kwargs): + x = cls.python2str(x) + if mode == 'keccak': + return c.import_object('web3.main.Web3').keccak(text=x, *args, **kwargs).hex() + elif mode == 'ss58': + return c.import_object('scalecodec.utils.ss58.ss58_encode')(x, *args,**kwargs) + elif mode == 'python': + return hash(x) + elif mode == 'md5': + return hashlib.md5(x.encode()).hexdigest() + elif mode == 'sha256': + return hashlib.sha256(x.encode()).hexdigest() + elif mode == 'sha512': + return hashlib.sha512(x.encode()).hexdigest() + elif mode =='sha3_512': + return hashlib.sha3_512(x.encode()).hexdigest() + else: + raise ValueError(f'unknown mode {mode}') + + #TODO: add quantum resistant hash functions + + + return hash_output + + @classmethod + def hash_modes(cls): + return ['keccak', 'ss58', 'python', 'md5', 'sha256', 'sha512', 'sha3_512'] + + @classmethod + def test(cls, x='bro'): + for mode in cls.hash_modes(): + try: + cls.print(f'SUCCESS {mode}: x -> {cls.hash(x, mode=mode)}', color='green') + except Exception as e: + cls.print(f'FAILED {mode}: x -> {e}', color='red') + + def __call__(self, *args, **kwargs): + return self.hash(*args, **kwargs) + +if __name__ == "__main__": + Hash.run() \ No newline at end of file diff --git a/commune/crypto/dh.py b/commune/crypto/dh.py new file mode 100644 index 000000000..04143e7eb --- /dev/null +++ b/commune/crypto/dh.py @@ 
-0,0 +1,71 @@ +import commune as c + + +class DH(c.Module): + description = 'Diffie-Hellman key exchange' + + def __init__(self, public_key1 = 'hey', public_key2='bro', private_key = 'test'): + self.public_key1 = self.str2int(public_key1) + self.public_key2 = self.str2int(public_key2) + self.private_key = self.str2int(c.hash(c.get_key(private_key).mnemonic) if c.key_exists(private_key) else private_key) + # convert string to int via binary + + c.print(self.__dict__) + + + self.full_key = None + + def str2int(self, x): + nchars = len(x) + x = sum(ord(x[byte])<<8*(nchars-byte-1) for byte in range(nchars)) + return c.print(x) + + + def generate_partial_key(self): + partial_key = self.public_key1**self.private_key + partial_key = partial_key%self.public_key2 + return partial_key + + def generate_full_key(self, partial_key_r): + full_key = partial_key_r**self.private_key + full_key = full_key%self.public_key2 + self.full_key = full_key + return full_key + + def encrypt_message(self, message): + encrypted_message = "" + key = self.full_key + for c in message: + encrypted_message += chr(ord(c)+key) + return encrypted_message + + def decrypt_message(self, encrypted_message): + decrypted_message = "" + key = self.full_key + for c in encrypted_message: + decrypted_message += chr(ord(c)-key) + return decrypted_message + + + @classmethod + def test(cls, public_key1='hey', public_key2='bro'): + dh1 = cls(public_key1=public_key1, public_key2=public_key1, private_key='test1') + dh2 = cls(public_key1=public_key1, public_key2=public_key1, private_key='test2') + partial_key1 = dh1.generate_partial_key() + partial_key2 = dh2.generate_partial_key() + full_key1 = dh1.generate_full_key(partial_key2) + full_key2 = dh2.generate_full_key(partial_key1) + message = 'hello' + encrypted_message = dh1.encrypt_message(message) + decrypted_message = dh2.decrypt_message(encrypted_message) + c.print(f'message: {message}') + c.print(f'encrypted_message: {encrypted_message}') + c.print(f'decrypted_message: {decrypted_message}') + c.print(f'full_key1: {full_key1}') + c.print(f'full_key2: {full_key2}') + c.print(f'partial_key1: {partial_key1}') + c.print(f'partial_key2: {partial_key2}') + assert message == decrypted_message + assert full_key1 == full_key2 + assert partial_key1 == partial_key2 + c.print('SUCCESS', color='green') diff --git a/commune/crypto/hash.py b/commune/crypto/hash.py new file mode 100644 index 000000000..58455a036 --- /dev/null +++ b/commune/crypto/hash.py @@ -0,0 +1,46 @@ +import commune as c +import hashlib + +class Hash(c.Module): + @classmethod + def hash(cls, x, mode: str='sha256',*args,**kwargs): + x = cls.python2str(x) + if mode == 'keccak': + return c.import_object('web3.main.Web3').keccak(text=x, *args, **kwargs).hex() + elif mode == 'ss58': + return c.import_object('scalecodec.utils.ss58.ss58_encode')(x, *args,**kwargs) + elif mode == 'python': + return hash(x) + elif mode == 'md5': + return hashlib.md5(x.encode()).hexdigest() + elif mode == 'sha256': + return hashlib.sha256(x.encode()).hexdigest() + elif mode == 'sha512': + return hashlib.sha512(x.encode()).hexdigest() + elif mode =='sha3_512': + return hashlib.sha3_512(x.encode()).hexdigest() + else: + raise ValueError(f'unknown mode {mode}') + + #TODO: add quantum resistant hash functions + + + return hash_output + + @classmethod + def hash_modes(cls): + return ['keccak', 'ss58', 'python', 'md5', 'sha256', 'sha512', 'sha3_512'] + + @classmethod + def test(cls, x='bro'): + for mode in cls.hash_modes(): + try: + cls.print(f'SUCCESS {mode}: x -> 
{cls.hash(x, mode=mode)}', color='green') + except Exception as e: + cls.print(f'FAILED {mode}: x -> {e}', color='red') + + def __call__(self, *args, **kwargs): + return self.hash(*args, **kwargs) + +if __name__ == "__main__": + Hash.run() \ No newline at end of file diff --git a/commune/demo_c.py b/commune/demo_c.py deleted file mode 100644 index 669f3bc9e..000000000 --- a/commune/demo_c.py +++ /dev/null @@ -1,12 +0,0 @@ -import commune as c - -class Demo(c.Module): - def __init__(self, i_hate_configs:bool=True): - self.i_hate_configs = i_hate_configs - - def bro(self, x='fam'): - return f'whadup {x} i_hate_configs: {self.i_hate_configs}' - - def hey(self, x='fam'): - return f'whadup {x}' - \ No newline at end of file diff --git a/commune/executor/executor.py b/commune/executor/executor.py new file mode 100644 index 000000000..c36c0a7d5 --- /dev/null +++ b/commune/executor/executor.py @@ -0,0 +1,240 @@ +import commune as c +import os +import sys +import time +import queue +import weakref +import itertools +import threading +from typing import Callable +from concurrent.futures._base import Future +import commune as c + + +Task = c.module('executor.task') + +NULL_ENTRY = (sys.maxsize, Task(None, (), {})) + + +class Executor(c.Module): + modes = ['thread', 'process'] + mode = 'thread' + + """Base threadpool executor with a priority queue""" + + # Used to assign unique thread names when thread_name_prefix is not supplied. + _counter = itertools.count().__next__ + # submit.__doc__ = _base.Executor.submit.__doc__ + threads_queues = weakref.WeakKeyDictionary() + + def __init__( + self, + max_workers: int =None, + maxsize : int =200 , + thread_name_prefix : str ="", + ): + """Initializes a new ThreadPoolExecutor instance. + Args: + max_workers: The maximum number of threads that can be used to + execute the given calls. + thread_name_prefix: An optional name prefix to give our threads. 
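            maxsize: Upper bound on the pending-task priority queue; when the
                queue is full, submit() either waits for space (wait=True) or
                returns an error dict instead of scheduling the call.
                (Editor's addition, inferred from the queue handling in
                submit() below.)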
+ """ + + max_workers = (os.cpu_count() or 1) * 5 if max_workers == None else max_workers + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + + self.max_workers = max_workers + self.work_queue = queue.PriorityQueue(maxsize=maxsize) + self.idle_semaphore = threading.Semaphore(0) + self.threads = [] + self.broken = False + self.shutdown = False + self.shutdown_lock = threading.Lock() + self.thread_name_prefix = thread_name_prefix or ("ThreadPoolExecutor-%d" % self._counter() ) + + + @classmethod + def executor(cls, max_workers:int = None, mode:str = 'thread', **kwargs): + assert mode in cls.modes, f"mode must be one of {cls.modes}" + module = cls if mode == cls.mode else c.module(f'executor.{mode}') + return module(max_workers=max_workers, **kwargs) + + @classmethod + def test(cls): + return [c.module('executor.thread').test(), c.module('executor.process').test()] + + + @property + def is_empty(self): + return self.work_queue.empty() + + + def submit(self, + fn: Callable, + params = None, + args:dict=None, + kwargs:dict=None, + priority:int=1, + timeout=200, + return_future:bool=True, + wait = True, + path:str=None, **extra_kwargs) -> Future: + if params != None: + if isinstance(params, dict): + kwargs = params + elif isinstance(params, list): + args = params + else: + raise ValueError("params must be a list or a dict") + # check if the queue is full and if so, raise an exception + if self.work_queue.full(): + if wait: + while self.work_queue.full(): + time.sleep(0.1) + else: + return {'success': False, 'msg':"cannot schedule new futures after maxsize exceeded"} + args = args or [] + kwargs = kwargs or {} + kwargs.update(extra_kwargs) + + with self.shutdown_lock: + + if self.broken: + raise Exception("ThreadPoolExecutor is broken") + if self.shutdown: + raise RuntimeError("cannot schedule new futures after shutdown") + priority = kwargs.get("priority", priority) + if "priority" in kwargs: + del kwargs["priority"] + task = Task(fn=fn, args=args, kwargs=kwargs, timeout=timeout, path=path) + # add the work item to the queue + self.work_queue.put((priority, task), block=False) + # adjust the thread count to match the new task + self.adjust_thread_count() + + # return the future (MAYBE WE CAN RETURN THE TASK ITSELF) + if return_future: + return task.future + + return task.future.result() + + + def adjust_thread_count(self): + # if idle threads are available, don't spin new threads + if self.idle_semaphore.acquire(timeout=0): + return + + # When the executor gets lost, the weakref callback will wake up + # the worker threads. + def weakref_cb(_, q=self.work_queue): + q.put(NULL_ENTRY) + + num_threads = len(self.threads) + if num_threads < self.max_workers: + thread_name = "%s_%d" % (self.thread_name_prefix or self, num_threads) + t = threading.Thread( + name=thread_name, + target=self.worker, + args=( + weakref.ref(self, weakref_cb), + self.work_queue, + ), + ) + t.daemon = True + t.start() + self.threads.append(t) + self.threads_queues[t] = self.work_queue + + def shutdown(self, wait=True): + with self.shutdown_lock: + self.shutdown = True + self.work_queue.put(NULL_ENTRY) + if wait: + for t in self.threads: + try: + t.join(timeout=2) + except Exception: + pass + + @staticmethod + def worker(executor_reference, work_queue): + c.new_event_loop(nest_asyncio=True) + + try: + while True: + work_item = work_queue.get(block=True) + priority = work_item[0] + + if priority == sys.maxsize: + # Wake up queue management thread. 
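                    # Editor's note: NULL_ENTRY carries sys.maxsize as its
                    # priority, so it sorts behind every real task; re-queueing
                    # it below lets the shutdown sentinel cascade to the
                    # remaining worker threads before this one exits.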
+ work_queue.put(NULL_ENTRY) + break + + item = work_item[1] + + if item is not None: + item.run() + # Delete references to object. See issue16284 + del item + continue + + executor = executor_reference() + # Exit if: + # - The interpreter is shutting down OR + # - The executor that owns the worker has been collected OR + # - The executor that owns the worker has been shutdown. + if executor is None or executor.shutdown: + # Flag the executor as shutting down as early as possible if it + # is not gc-ed yet. + if executor is not None: + executor.shutdown = True + # Notice other workers + work_queue.put(NULL_ENTRY) + return + del executor + except Exception as e: + e = c.detailed_error(e) + + @property + def num_tasks(self): + return self.work_queue.qsize() + + @classmethod + def as_completed(futures: list): + assert isinstance(futures, list), "futures must be a list" + return [f for f in futures if not f.done()] + + @staticmethod + def wait(futures:list) -> list: + futures = [futures] if not isinstance(futures, list) else futures + results = [] + for future in c.as_completed(futures): + results += [future.result()] + return results + + @classmethod + def test(cls): + def fn(x): + result = x*2 + c.print(result) + return result + + self = cls() + futures = [] + for i in range(10): + futures += [self.submit(fn=fn, kwargs=dict(x=i))] + for future in c.tqdm(futures): + future.result() + for i in range(10): + futures += [self.submit(fn=fn, kwargs=dict(x=i))] + + results = c.wait(futures, timeout=10) + + while self.num_tasks > 0: + c.print(self.num_tasks, 'tasks remaining', color='red') + + + return {'success': True, 'msg': 'thread pool test passed'} + + \ No newline at end of file diff --git a/commune/executor/process/process_executor.py b/commune/executor/process/process_executor.py new file mode 100644 index 000000000..7ce9e91d3 --- /dev/null +++ b/commune/executor/process/process_executor.py @@ -0,0 +1,902 @@ +# Copyright 2009 Brian Quinlan. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Implements ProcessPoolExecutor. + +The following diagram and text describe the data-flow through the system: + +|======================= In-process =====================|== Out-of-process ==| + ++----------+ +----------+ +--------+ +-----------+ +---------+ +| | => | Work Ids | | | | Call Q | | Process | +| | +----------+ | | +-----------+ | Pool | +| | | ... | | | | ... | +---------+ +| | | 6 | => | | => | 5, call() | => | | +| | | 7 | | | | ... | | | +| Process | | ... | | Local | +-----------+ | Process | +| Pool | +----------+ | Worker | | #1..n | +| Executor | | Thread | | | +| | +----------- + | | +-----------+ | | +| | <=> | Work Items | <=> | | <= | Result Q | <= | | +| | +------------+ | | +-----------+ | | +| | | 6: call() | | | | ... | | | +| | | future | | | | 4, result | | | +| | | ... | | | | 3, except | | | ++----------+ +------------+ +--------+ +-----------+ +---------+ + +Executor.submit() called: +- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict +- adds the id of the _WorkItem to the "Work Ids" queue + +Local worker thread: +- reads work ids from the "Work Ids" queue and looks up the corresponding + WorkItem from the "Work Items" dict: if the work item has been cancelled then + it is simply removed from the dict, otherwise it is repackaged as a + _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q" + until "Call Q" is full. 
NOTE: the size of the "Call Q" is kept small because + calls placed in the "Call Q" can no longer be cancelled with Future.cancel(). +- reads _ResultItems from "Result Q", updates the future stored in the + "Work Items" dict and deletes the dict entry + +Process #1..n: +- reads _CallItems from "Call Q", executes the calls, and puts the resulting + _ResultItems in "Result Q" +""" + +__author__ = 'Brian Quinlan (brian@sweetapp.com)' + +import os +from concurrent.futures import _base +import queue +import multiprocessing as mp +# This import is required to load the multiprocessing.connection submodule +# so that it can be accessed later as `mp.connection` +import multiprocessing.connection +from multiprocessing.queues import Queue +import threading +import weakref +from functools import partial +import itertools +import sys +from traceback import format_exception +import commune as c +import inspect + + +_threads_wakeups = weakref.WeakKeyDictionary() +_global_shutdown = False + + +class _ThreadWakeup: + def __init__(self): + self._closed = False + self._reader, self._writer = mp.Pipe(duplex=False) + + def close(self): + # Please note that we do not take the shutdown lock when + # calling clear() (to avoid deadlocking) so this method can + # only be called safely from the same thread as all calls to + # clear() even if you hold the shutdown lock. Otherwise we + # might try to read from the closed pipe. + if not self._closed: + self._closed = True + self._writer.close() + self._reader.close() + + def wakeup(self): + if not self._closed: + self._writer.send_bytes(b"") + + def clear(self): + if not self._closed: + while self._reader.poll(): + self._reader.recv_bytes() + + +def _python_exit(): + global _global_shutdown + _global_shutdown = True + items = list(_threads_wakeups.items()) + for _, thread_wakeup in items: + # call not protected by ProcessPoolExecutor._shutdown_lock + thread_wakeup.wakeup() + for t, _ in items: + t.join() + +# Register for `_python_exit()` to be called just before joining all +# non-daemon threads. This is used instead of `atexit.register()` for +# compatibility with subinterpreters, which no longer support daemon threads. +# See bpo-39812 for context. +threading._register_atexit(_python_exit) + +# Controls how many more calls than processes will be queued in the call queue. +# A smaller number will mean that processes spend more time idle waiting for +# work while a larger number will make Future.cancel() succeed less frequently +# (Futures in the call queue cannot be cancelled). +EXTRA_QUEUED_CALLS = 1 + + +# On Windows, WaitForMultipleObjects is used to wait for processes to finish. +# It can wait on, at most, 63 objects. 
There is an overhead of two objects: +# - the result queue reader +# - the thread wakeup reader +_MAX_WINDOWS_WORKERS = 63 - 2 + +# Hack to embed stringification of remote traceback in local traceback + +class _RemoteTraceback(Exception): + def __init__(self, tb): + self.tb = tb + def __str__(self): + return self.tb + +class _ExceptionWithTraceback: + def __init__(self, exc, tb): + tb = ''.join(format_exception(type(exc), exc, tb)) + self.exc = exc + # Traceback object needs to be garbage-collected as its frames + # contain references to all the objects in the exception scope + self.exc.__traceback__ = None + self.tb = '\n"""\n%s"""' % tb + def __reduce__(self): + return _rebuild_exc, (self.exc, self.tb) + +def _rebuild_exc(exc, tb): + exc.__cause__ = _RemoteTraceback(tb) + return exc + +class _WorkItem(object): + def __init__(self, future, fn, args, kwargs): + self.future = future + self.fn = fn + self.args = args + self.kwargs = kwargs + +class _ResultItem(object): + def __init__(self, work_id, exception=None, result=None, exit_pid=None): + self.work_id = work_id + self.exception = exception + self.result = result + self.exit_pid = exit_pid + +class _CallItem(object): + def __init__(self, work_id, fn, args, kwargs): + self.work_id = work_id + self.fn = fn + self.args = args + self.kwargs = kwargs + + +class _SafeQueue(Queue): + """Safe Queue set exception to the future object linked to a job""" + def __init__(self, max_size=0, *, ctx, pending_work_items, shutdown_lock, + thread_wakeup): + self.pending_work_items = pending_work_items + self.shutdown_lock = shutdown_lock + self.thread_wakeup = thread_wakeup + super().__init__(max_size, ctx=ctx) + + def _on_queue_feeder_error(self, e, obj): + if isinstance(obj, _CallItem): + tb = format_exception(type(e), e, e.__traceback__) + e.__cause__ = _RemoteTraceback('\n"""\n{}"""'.format(''.join(tb))) + work_item = self.pending_work_items.pop(obj.work_id, None) + with self.shutdown_lock: + self.thread_wakeup.wakeup() + # work_item can be None if another process terminated. In this + # case, the executor_manager_thread fails all work_items + # with BrokenProcessPool + if work_item is not None: + work_item.future.set_exception(e) + else: + super()._on_queue_feeder_error(e, obj) + + +def _get_chunks(*iterables, chunksize): + """ Iterates over zip()ed iterables in chunks. """ + it = zip(*iterables) + while True: + chunk = tuple(itertools.islice(it, chunksize)) + if not chunk: + return + yield chunk + + +def _process_chunk(fn, chunk): + """ Processes a chunk of an iterable passed to map. + + Runs the function passed to map() on a chunk of the + iterable passed to map. + + This function is run in a separate process. + + """ + return [fn(*args) for args in chunk] + + +def _sendback_result(result_queue, work_id, result=None, exception=None, + exit_pid=None): + """Safely send back the given result or exception""" + try: + result_queue.put(_ResultItem(work_id, result=result, + exception=exception, exit_pid=exit_pid)) + except BaseException as e: + exc = _ExceptionWithTraceback(e, e.__traceback__) + result_queue.put(_ResultItem(work_id, exception=exc, + exit_pid=exit_pid)) + + +def _process_worker(call_queue, result_queue, initializer, initargs, max_tasks=None): + """Evaluates calls from call_queue and places the results in result_queue. + + This worker is run in a separate process. + + Args: + call_queue: A ctx.Queue of _CallItems that will be read and + evaluated by the worker. + result_queue: A ctx.Queue of _ResultItems that will written + to by the worker. 
+ initializer: A callable initializer, or None + initargs: A tuple of args for the initializer + """ + if initializer is not None: + try: + initializer(*initargs) + except BaseException: + _base.LOGGER.critical('Exception in initializer:', exc_info=True) + # The parent will notice that the process stopped and + # mark the pool broken + return + num_tasks = 0 + exit_pid = None + + while True: + call_item = call_queue.get(block=True) + if call_item is None: + # Wake up queue management thread + result_queue.put(os.getpid()) + return + + if max_tasks is not None: + num_tasks += 1 + if num_tasks >= max_tasks: + exit_pid = os.getpid() + + try: + r = call_item.fn(*call_item.args, **call_item.kwargs) + except BaseException as e: + exc = _ExceptionWithTraceback(e, e.__traceback__) + _sendback_result(result_queue, call_item.work_id, exception=exc, + exit_pid=exit_pid) + else: + _sendback_result(result_queue, call_item.work_id, result=r, + exit_pid=exit_pid) + del r + + # Liberate the resource as soon as possible, to avoid holding onto + # open files or shared memory that is not needed anymore + del call_item + + if exit_pid is not None: + return + + +class _ExecutorManagerThread(threading.Thread): + """Manages the communication between this process and the worker processes. + + The manager is run in a local thread. + + Args: + executor: A reference to the ProcessPoolExecutor that owns + this thread. A weakref will be own by the manager as well as + references to internal objects used to introspect the state of + the executor. + """ + + def __init__(self, executor): + # Store references to necessary internals of the executor. + + # A _ThreadWakeup to allow waking up the queue_manager_thread from the + # main Thread and avoid deadlocks caused by permanently locked queues. + self.thread_wakeup = executor._executor_manager_thread_wakeup + self.shutdown_lock = executor._shutdown_lock + + # A weakref.ref to the ProcessPoolExecutor that owns this thread. Used + # to determine if the ProcessPoolExecutor has been garbage collected + # and that the manager can exit. + # When the executor gets garbage collected, the weakref callback + # will wake up the queue management thread so that it can terminate + # if there is no pending work item. + def weakref_cb(_, + thread_wakeup=self.thread_wakeup, + shutdown_lock=self.shutdown_lock): + mp.util.debug('Executor collected: triggering callback for' + ' QueueManager wakeup') + with shutdown_lock: + thread_wakeup.wakeup() + + self.executor_reference = weakref.ref(executor, weakref_cb) + + # A list of the ctx.Process instances used as workers. + self.processes = executor._processes + + # A ctx.Queue that will be filled with _CallItems derived from + # _WorkItems for processing by the process workers. + self.call_queue = executor._call_queue + + # A ctx.SimpleQueue of _ResultItems generated by the process workers. + self.result_queue = executor._result_queue + + # A queue.Queue of work ids e.g. Queue([5, 6, ...]). + self.work_ids_queue = executor._work_ids + + # Maximum number of tasks a worker process can execute before + # exiting safely + self.max_tasks_per_child = executor._max_tasks_per_child + + # A dict mapping work ids to _WorkItems e.g. + # {5: <_WorkItem...>, 6: <_WorkItem...>, ...} + self.pending_work_items = executor._pending_work_items + + super().__init__() + + def run(self): + # Main loop for the executor manager thread. + + while True: + # gh-109047: During Python finalization, self.call_queue.put() + # creation of a thread can fail with RuntimeError. 
+ try: + self.add_call_item_to_queue() + except BaseException as exc: + cause = format_exception(exc) + self.terminate_broken(cause) + return + + result_item, is_broken, cause = self.wait_result_broken_or_wakeup() + + if is_broken: + self.terminate_broken(cause) + return + if result_item is not None: + self.process_result_item(result_item) + + process_exited = result_item.exit_pid is not None + if process_exited: + p = self.processes.pop(result_item.exit_pid) + p.join() + + # Delete reference to result_item to avoid keeping references + # while waiting on new results. + del result_item + + if executor := self.executor_reference(): + if process_exited: + with self.shutdown_lock: + executor._adjust_process_count() + else: + executor._idle_worker_semaphore.release() + del executor + + if self.is_shutting_down(): + self.flag_executor_shutting_down() + + # When only canceled futures remain in pending_work_items, our + # next call to wait_result_broken_or_wakeup would hang forever. + # This makes sure we have some running futures or none at all. + self.add_call_item_to_queue() + + # Since no new work items can be added, it is safe to shutdown + # this thread if there are no pending work items. + if not self.pending_work_items: + self.join_executor_internals() + return + + def add_call_item_to_queue(self): + # Fills call_queue with _WorkItems from pending_work_items. + # This function never blocks. + while True: + if self.call_queue.full(): + return + try: + work_id = self.work_ids_queue.get(block=False) + except queue.Empty: + return + else: + work_item = self.pending_work_items[work_id] + + if work_item.future.set_running_or_notify_cancel(): + self.call_queue.put(_CallItem(work_id, + work_item.fn, + work_item.args, + work_item.kwargs), + block=True) + else: + del self.pending_work_items[work_id] + continue + + def wait_result_broken_or_wakeup(self): + # Wait for a result to be ready in the result_queue while checking + # that all worker processes are still running, or for a wake up + # signal send. The wake up signals come either from new tasks being + # submitted, from the executor being shutdown/gc-ed, or from the + # shutdown of the python interpreter. + result_reader = self.result_queue._reader + assert not self.thread_wakeup._closed + wakeup_reader = self.thread_wakeup._reader + readers = [result_reader, wakeup_reader] + worker_sentinels = [p.sentinel for p in list(self.processes.values())] + ready = mp.connection.wait(readers + worker_sentinels) + + cause = None + is_broken = True + result_item = None + if result_reader in ready: + try: + result_item = result_reader.recv() + is_broken = False + except BaseException as exc: + cause = format_exception(exc) + + elif wakeup_reader in ready: + is_broken = False + + # No need to hold the _shutdown_lock here because: + # 1. we're the only thread to use the wakeup reader + # 2. we're also the only thread to call thread_wakeup.close() + # 3. we want to avoid a possible deadlock when both reader and writer + # would block (gh-105829) + self.thread_wakeup.clear() + + return result_item, is_broken, cause + + def process_result_item(self, result_item): + # Process the received a result_item. This can be either the PID of a + # worker that exited gracefully or a _ResultItem + + # Received a _ResultItem so mark the future as completed. 
+ work_item = self.pending_work_items.pop(result_item.work_id, None) + # work_item can be None if another process terminated (see above) + if work_item is not None: + if result_item.exception: + work_item.future.set_exception(result_item.exception) + else: + work_item.future.set_result(result_item.result) + + def is_shutting_down(self): + # Check whether we should start shutting down the executor. + executor = self.executor_reference() + # No more work items can be added if: + # - The interpreter is shutting down OR + # - The executor that owns this worker has been collected OR + # - The executor that owns this worker has been shutdown. + return (_global_shutdown or executor is None + or executor._shutdown_thread) + + def _terminate_broken(self, cause): + # Terminate the executor because it is in a broken state. The cause + # argument can be used to display more information on the error that + # lead the executor into becoming broken. + + # Mark the process pool broken so that submits fail right now. + executor = self.executor_reference() + if executor is not None: + executor._broken = ('A child process terminated ' + 'abruptly, the process pool is not ' + 'usable anymore') + executor._shutdown_thread = True + executor = None + + # All pending tasks are to be marked failed with the following + # BrokenProcessPool error + bpe = BrokenProcessPool("A process in the process pool was " + "terminated abruptly while the future was " + "running or pending.") + if cause is not None: + bpe.__cause__ = _RemoteTraceback( + f"\n'''\n{''.join(cause)}'''") + + # Mark pending tasks as failed. + for work_id, work_item in self.pending_work_items.items(): + try: + work_item.future.set_exception(bpe) + except _base.InvalidStateError: + # set_exception() fails if the future is cancelled: ignore it. + # Trying to check if the future is cancelled before calling + # set_exception() would leave a race condition if the future is + # cancelled between the check and set_exception(). + pass + # Delete references to object. See issue16284 + del work_item + self.pending_work_items.clear() + + # Terminate remaining workers forcibly: the queues or their + # locks may be in a dirty state and block forever. + for p in self.processes.values(): + p.terminate() + + self.call_queue._terminate_broken() + + # clean up resources + self._join_executor_internals(broken=True) + + def terminate_broken(self, cause): + with self.shutdown_lock: + self._terminate_broken(cause) + + def flag_executor_shutting_down(self): + # Flag the executor as shutting down and cancel remaining tasks if + # requested as early as possible if it is not gc-ed yet. + executor = self.executor_reference() + if executor is not None: + executor._shutdown_thread = True + # Cancel pending work items if requested. + if executor._cancel_pending_futures: + # Cancel all pending futures and update pending_work_items + # to only have futures that are currently running. + new_pending_work_items = {} + for work_id, work_item in self.pending_work_items.items(): + if not work_item.future.cancel(): + new_pending_work_items[work_id] = work_item + self.pending_work_items = new_pending_work_items + # Drain work_ids_queue since we no longer need to + # add items to the call queue. + while True: + try: + self.work_ids_queue.get_nowait() + except queue.Empty: + break + # Make sure we do this only once to not waste time looping + # on running processes over and over. 
+ executor._cancel_pending_futures = False + + def shutdown_workers(self): + n_children_to_stop = self.get_n_children_alive() + n_sentinels_sent = 0 + # Send the right number of sentinels, to make sure all children are + # properly terminated. + while (n_sentinels_sent < n_children_to_stop + and self.get_n_children_alive() > 0): + for i in range(n_children_to_stop - n_sentinels_sent): + try: + self.call_queue.put_nowait(None) + n_sentinels_sent += 1 + except queue.Full: + break + + def join_executor_internals(self): + with self.shutdown_lock: + self._join_executor_internals() + + def _join_executor_internals(self, broken=False): + # If broken, call_queue was closed and so can no longer be used. + if not broken: + self.shutdown_workers() + + # Release the queue's resources as soon as possible. + self.call_queue.close() + self.call_queue.join_thread() + self.thread_wakeup.close() + + # If .join() is not called on the created processes then + # some ctx.Queue methods may deadlock on Mac OS X. + for p in self.processes.values(): + if broken: + p.terminate() + p.join() + + def get_n_children_alive(self): + # This is an upper bound on the number of children alive. + return sum(p.is_alive() for p in self.processes.values()) + + +_system_limits_checked = False +_system_limited = None + + +def _check_system_limits(): + global _system_limits_checked, _system_limited + if _system_limits_checked: + if _system_limited: + raise NotImplementedError(_system_limited) + _system_limits_checked = True + try: + import multiprocessing.synchronize + except ImportError: + _system_limited = ( + "This Python build lacks multiprocessing.synchronize, usually due " + "to named semaphores being unavailable on this platform." + ) + raise NotImplementedError(_system_limited) + try: + nsems_max = os.sysconf("SC_SEM_NSEMS_MAX") + except (AttributeError, ValueError): + # sysconf not available or setting not available + return + if nsems_max == -1: + # indetermined limit, assume that limit is determined + # by available memory only + return + if nsems_max >= 256: + # minimum number of semaphores available + # according to POSIX + return + _system_limited = ("system provides too few semaphores (%d" + " available, 256 necessary)" % nsems_max) + raise NotImplementedError(_system_limited) + + +def _chain_from_iterable_of_lists(iterable): + """ + Specialized implementation of itertools.chain.from_iterable. + Each item in *iterable* should be a list. This function is + careful not to keep references to yielded objects. + """ + for element in iterable: + element.reverse() + while element: + yield element.pop() + + +class BrokenProcessPool(_base.BrokenExecutor): + """ + Raised when a process in a ProcessPoolExecutor terminated abruptly + while a future was in the running state. + """ + +class ProcessPoolExecutor(_base.Executor,c.Module): + def __init__(self, max_workers=None, mp_context=None, + initializer=None, initargs=(), *, max_tasks_per_child=None): + """Initializes a new ProcessPoolExecutor instance. + + Args: + max_workers: The maximum number of processes that can be used to + execute the given calls. If None or not given then as many + worker processes will be created as the machine has processors. + mp_context: A multiprocessing context to launch the workers created + using the multiprocessing.get_context('start method') API. This + object should provide SimpleQueue, Queue and Process. + initializer: A callable used to initialize worker processes. + initargs: A tuple of arguments to pass to the initializer. 
+ max_tasks_per_child: The maximum number of tasks a worker process + can complete before it will exit and be replaced with a fresh + worker process. The default of None means worker process will + live as long as the executor. Requires a non-'fork' mp_context + start method. When given, we default to using 'spawn' if no + mp_context is supplied. + """ + _check_system_limits() + + if max_workers is None: + self._max_workers = os.cpu_count() or 1 + if sys.platform == 'win32': + self._max_workers = min(_MAX_WINDOWS_WORKERS, + self._max_workers) + else: + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + elif (sys.platform == 'win32' and + max_workers > _MAX_WINDOWS_WORKERS): + raise ValueError( + f"max_workers must be <= {_MAX_WINDOWS_WORKERS}") + + self._max_workers = max_workers + + if mp_context is None: + if max_tasks_per_child is not None: + mp_context = mp.get_context("spawn") + else: + mp_context = mp.get_context() + self._mp_context = mp_context + + # https://github.com/python/cpython/issues/90622 + self._safe_to_dynamically_spawn_children = ( + self._mp_context.get_start_method(allow_none=False) != "fork") + + if initializer is not None and not callable(initializer): + raise TypeError("initializer must be a callable") + self._initializer = initializer + self._initargs = initargs + + if max_tasks_per_child is not None: + if not isinstance(max_tasks_per_child, int): + raise TypeError("max_tasks_per_child must be an integer") + elif max_tasks_per_child <= 0: + raise ValueError("max_tasks_per_child must be >= 1") + if self._mp_context.get_start_method(allow_none=False) == "fork": + # https://github.com/python/cpython/issues/90622 + raise ValueError("max_tasks_per_child is incompatible with" + " the 'fork' multiprocessing start method;" + " supply a different mp_context.") + self._max_tasks_per_child = max_tasks_per_child + + # Management thread + self._executor_manager_thread = None + + # Map of pids to processes + self._processes = {} + + # Shutdown is a two-step process. + self._shutdown_thread = False + self._shutdown_lock = threading.Lock() + self._idle_worker_semaphore = threading.Semaphore(0) + self._broken = False + self._queue_count = 0 + self._pending_work_items = {} + self._cancel_pending_futures = False + + # _ThreadWakeup is a communication channel used to interrupt the wait + # of the main loop of executor_manager_thread from another thread (e.g. + # when calling executor.submit or executor.shutdown). We do not use the + # _result_queue to send wakeup signals to the executor_manager_thread + # as it could result in a deadlock if a worker process dies with the + # _result_queue write lock still acquired. + # + # _shutdown_lock must be locked to access _ThreadWakeup.close() and + # .wakeup(). Care must also be taken to not call clear or close from + # more than one thread since _ThreadWakeup.clear() is not protected by + # the _shutdown_lock + self._executor_manager_thread_wakeup = _ThreadWakeup() + + # Create communication channels for the executor + # Make the call queue slightly larger than the number of processes to + # prevent the worker processes from idling. But don't make it too big + # because futures in the call queue cannot be cancelled. 
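        # Editor's note: EXTRA_QUEUED_CALLS is 1 (defined above), so at most one
        # call beyond the number of worker processes can sit in the call queue.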
+ queue_size = self._max_workers + EXTRA_QUEUED_CALLS + self._call_queue = _SafeQueue( + max_size=queue_size, ctx=self._mp_context, + pending_work_items=self._pending_work_items, + shutdown_lock=self._shutdown_lock, + thread_wakeup=self._executor_manager_thread_wakeup) + # Killed worker processes can produce spurious "broken pipe" + # tracebacks in the queue's own worker thread. But we detect killed + # processes anyway, so silence the tracebacks. + self._call_queue._ignore_epipe = True + self._result_queue = mp_context.SimpleQueue() + self._work_ids = queue.Queue() + + def _start_executor_manager_thread(self): + if self._executor_manager_thread is None: + # Start the processes so that their sentinels are known. + if not self._safe_to_dynamically_spawn_children: # ie, using fork. + self._launch_processes() + self._executor_manager_thread = _ExecutorManagerThread(self) + self._executor_manager_thread.start() + _threads_wakeups[self._executor_manager_thread] = \ + self._executor_manager_thread_wakeup + + def _adjust_process_count(self): + # if there's an idle process, we don't need to spawn a new one. + if self._idle_worker_semaphore.acquire(blocking=False): + return + + process_count = len(self._processes) + if process_count < self._max_workers: + # Assertion disabled as this codepath is also used to replace a + # worker that unexpectedly dies, even when using the 'fork' start + # method. That means there is still a potential deadlock bug. If a + # 'fork' mp_context worker dies, we'll be forking a new one when + # we know a thread is running (self._executor_manager_thread). + #assert self._safe_to_dynamically_spawn_children or not self._executor_manager_thread, 'https://github.com/python/cpython/issues/90622' + self._spawn_process() + + def _launch_processes(self): + # https://github.com/python/cpython/issues/90622 + assert not self._executor_manager_thread, ( + 'Processes cannot be fork()ed after the thread has started, ' + 'deadlock in the child processes could result.') + for _ in range(len(self._processes), self._max_workers): + self._spawn_process() + + def _spawn_process(self): + p = self._mp_context.Process( + target=_process_worker, + args=(self._call_queue, + self._result_queue, + self._initializer, + self._initargs, + self._max_tasks_per_child)) + p.start() + self._processes[p.pid] = p + + + def submit(self, fn, *args, return_future:bool = True, init_kwargs:dict=None, **kwargs): + with self._shutdown_lock: + fn = c.resolve_fn(fn) + if self._broken: + raise BrokenProcessPool(self._broken) + if self._shutdown_thread: + raise RuntimeError('cannot schedule new futures after shutdown') + if _global_shutdown: + raise RuntimeError('cannot schedule new futures after ' + 'interpreter shutdown') + + f = _base.Future() + w = _WorkItem(f, fn, args, kwargs) + + self._pending_work_items[self._queue_count] = w + self._work_ids.put(self._queue_count) + self._queue_count += 1 + # Wake up queue management thread + self._executor_manager_thread_wakeup.wakeup() + if self._safe_to_dynamically_spawn_children: + self._adjust_process_count() + self._start_executor_manager_thread() + if return_future: + return f + return f.result() + + + submit.__doc__ = _base.Executor.submit.__doc__ + + def map(self, fn, *iterables, timeout=None, chunksize=1): + """Returns an iterator equivalent to map(fn, iter). + + Args: + fn: A callable that will take as many arguments as there are + passed iterables. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. 
+ chunksize: If greater than one, the iterables will be chopped into + chunks of size chunksize and submitted to the process pool. + If set to one, the items in the list will be sent one at a time. + + Returns: + An iterator equivalent to: map(func, *iterables) but the calls may + be evaluated out-of-order. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + Exception: If fn(*args) raises for any values. + """ + if chunksize < 1: + raise ValueError("chunksize must be >= 1.") + + results = super().map(partial(_process_chunk, fn), + _get_chunks(*iterables, chunksize=chunksize), + timeout=timeout) + return _chain_from_iterable_of_lists(results) + + def shutdown(self, wait=True, *, cancel_futures=False): + with self._shutdown_lock: + self._cancel_pending_futures = cancel_futures + self._shutdown_thread = True + if self._executor_manager_thread_wakeup is not None: + # Wake up queue management thread + self._executor_manager_thread_wakeup.wakeup() + + if self._executor_manager_thread is not None and wait: + self._executor_manager_thread.join() + # To reduce the risk of opening too many files, remove references to + # objects that use file descriptors. + self._executor_manager_thread = None + self._call_queue = None + if self._result_queue is not None and wait: + self._result_queue.close() + self._result_queue = None + self._processes = None + self._executor_manager_thread_wakeup = None + + @property + def num_tasks(self): + return self._call_queue.__sizeof__() + + @staticmethod + def fn(x=2): + result = x*2 + return result + shutdown.__doc__ = _base.Executor.shutdown.__doc__ + @classmethod + def test(cls): + + self = cls() + futures = [] + for i in range(10): + futures += [self.submit('module/ls', return_future=True)] + # for future in c.tqdm(futures): + # future.result() + c.print(c.wait(futures)) + + return {'success': True, 'msg': 'process pool test passed'} + \ No newline at end of file diff --git a/commune/executor/task.py b/commune/executor/task.py new file mode 100644 index 000000000..18d4da8dc --- /dev/null +++ b/commune/executor/task.py @@ -0,0 +1,141 @@ +# Workers are created as daemon threads. This is done to allow the interpreter +# to exit when there are still idle threads in a ThreadPoolExecutor's thread +# pool (i.e. shutdown() was not called). However, allowing workers to die with +# the interpreter has two undesirable properties: +# - The workers would still be running during interpreter shutdown, +# meaning that they would fail in unpredictable ways. +# - The workers could be killed while evaluating a work item, which could +# be bad if the callable being evaluated has external side-effects e.g. +# writing to a file. +# +# To work around this problem, an exit handler is installed which tells the +# workers to exit when their work queues are empty and then waits until the +# threads finish. 
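+#
+# A minimal sketch of that exit-handler pattern (illustrative only, it is not
+# part of this module; _work_queue and _worker_threads are hypothetical names):
+#
+#     import atexit, queue
+#
+#     _work_queue = queue.Queue()
+#     _worker_threads = []
+#
+#     def _python_exit():
+#         # one sentinel per worker: each drains the queue, sees None, exits
+#         for _ in _worker_threads:
+#             _work_queue.put(None)
+#         for t in _worker_threads:
+#             t.join()
+#
+#     atexit.register(_python_exit)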
+ +import time +from concurrent.futures._base import Future +import commune as c + +class Task(c.Module): + def __init__(self, + fn:str, + args:list, + kwargs:dict, + timeout:int=10, + priority:int=1, + save:bool = False, + path = None, + **extra_kwargs): + + self.future = Future() + self.fn = fn # the function to run + self.start_time = time.time() # the time the task was created + self.args = args # the arguments of the task + self.kwargs = kwargs # the arguments of the task + self.timeout = timeout # the timeout of the task + self.priority = priority # the priority of the task + self.data = None # the result of the task + + self.fn_name = fn.__name__ if fn != None else str(fn) # the name of the function + # for the sake of simplicity, we'll just add all the extra kwargs to the task object + self.extra_kwargs = extra_kwargs + self.save = save + self.status = 'pending' # pending, running, done + self.__dict__.update(extra_kwargs) + # save the task state + + + @property + def lifetime(self) -> float: + return time.time() - self.start_time + + @property + def state(self) -> dict: + return { + 'fn': self.fn.__name__, + 'kwargs': self.kwargs, + 'args': self.args, + 'timeout': self.timeout, + 'start_time': self.start_time, + 'priority': self.priority, + 'status': self.status, + 'data': self.data, + **self.extra_kwargs + } + + @property + def save_state(self): + + self.path + path = f"{self.status}_{self.fn_name}_args={str(self.args)}_kwargs={str(self.kwargs)}" + if self.path != None: + path = f"{self.path}/{path}" + if self.status == 'pending': + return self.put(self.status2path[self.status], self.state) + elif self.status in ['complete', 'failed']: + if c.exists(self.paths['pending']): + c.rm(self.paths['pending']) + return self.put(self.paths[self.status], self.state) + else: + raise ValueError(f"Task status must be pending or complete, not {self.status}") + + def run(self): + """Run the given work item""" + # Checks if future is canceled or if work item is stale + if (not self.future.set_running_or_notify_cancel()) or ( + (time.time() - self.start_time) > self.timeout + ): + self.future.set_exception(TimeoutError('Task timed out')) + + try: + data = self.fn(*self.args, **self.kwargs) + self.status = 'complete' + except Exception as e: + + # what does this do? 
A: it sets the exception of the future, and sets the status to failed + data = c.detailed_error(e) + if 'event loop' in data['error']: + c.new_event_loop(nest_asyncio=True) + self.status = 'failed' + + self.future.set_result(data) + # store the result of the task + + self.data = data + + if self.save: + self.save_state() + + def result(self) -> object: + return self.future.result() + + @property + def _condition(self) -> bool: + return self.future._condition + @property + def _state(self, *args, **kwargs) -> bool: + return self.future._state + + @property + def _waiters(self) -> bool: + return self.future._waiters + + def cancel(self) -> bool: + self.future.cancel() + + def running(self) -> bool: + return self.future.running() + + def done(self) -> bool: + return self.future.done() + + def __lt__(self, other): + if isinstance(other, Task): + return self.priority < other.priority + elif isinstance(other, int): + return self.priority < other + else: + raise TypeError(f"Cannot compare Task with {type(other)}") + + + diff --git a/commune/executor/thread/thread_executor.py b/commune/executor/thread/thread_executor.py new file mode 100644 index 000000000..ed8341805 --- /dev/null +++ b/commune/executor/thread/thread_executor.py @@ -0,0 +1,229 @@ + +import os +import sys +import time +import queue +import random +import weakref +import itertools +import threading + +from loguru import logger +from typing import Callable +import concurrent +from concurrent.futures._base import Future +import commune as c +import gc + +Task = c.module('executor.task') + +NULL_ENTRY = (sys.maxsize, Task(None, (), {})) + +class ThreadPoolExecutor(c.Module): + """Base threadpool executor with a priority queue""" + + # Used to assign unique thread names when thread_name_prefix is not supplied. + _counter = itertools.count().__next__ + # submit.__doc__ = _base.Executor.submit.__doc__ + threads_queues = weakref.WeakKeyDictionary() + + def __init__( + self, + max_workers: int =None, + maxsize : int =200 , + thread_name_prefix : str ="", + ): + """Initializes a new ThreadPoolExecutor instance. + Args: + max_workers: The maximum number of threads that can be used to + execute the given calls. + thread_name_prefix: An optional name prefix to give our threads. 
+ """ + + max_workers = (os.cpu_count() or 1) * 5 if max_workers == None else max_workers + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + + self.max_workers = max_workers + self.work_queue = queue.PriorityQueue(maxsize=maxsize) + self.idle_semaphore = threading.Semaphore(0) + self.threads = [] + self.broken = False + self.shutdown = False + self.shutdown_lock = threading.Lock() + self.thread_name_prefix = thread_name_prefix or ("ThreadPoolExecutor-%d" % self._counter() ) + + @property + def is_empty(self): + return self.work_queue.empty() + + + def submit(self, + fn: Callable, + params = None, + args:dict=None, + kwargs:dict=None, + priority:int=1, + timeout=200, + return_future:bool=True, + wait = True, + path:str=None) -> Future: + if params != None: + if isinstance(params, dict): + kwargs = params + elif isinstance(params, list): + args = params + else: + raise ValueError("params must be a list or a dict") + # check if the queue is full and if so, raise an exception + if self.work_queue.full(): + if wait: + while self.work_queue.full(): + time.sleep(0.1) + else: + return {'success': False, 'msg':"cannot schedule new futures after maxsize exceeded"} + + args = args or [] + kwargs = kwargs or {} + + with self.shutdown_lock: + + if self.broken: + raise Exception("ThreadPoolExecutor is broken") + if self.shutdown: + raise RuntimeError("cannot schedule new futures after shutdown") + priority = kwargs.get("priority", priority) + if "priority" in kwargs: + del kwargs["priority"] + task = Task(fn=fn, args=args, kwargs=kwargs, timeout=timeout, path=path) + # add the work item to the queue + self.work_queue.put((priority, task), block=False) + # adjust the thread count to match the new task + self.adjust_thread_count() + + # return the future (MAYBE WE CAN RETURN THE TASK ITSELF) + if return_future: + return task.future + + return task.future.result() + + + def adjust_thread_count(self): + # if idle threads are available, don't spin new threads + if self.idle_semaphore.acquire(timeout=0): + return + + # When the executor gets lost, the weakref callback will wake up + # the worker threads. + def weakref_cb(_, q=self.work_queue): + q.put(NULL_ENTRY) + + num_threads = len(self.threads) + if num_threads < self.max_workers: + thread_name = "%s_%d" % (self.thread_name_prefix or self, num_threads) + t = threading.Thread( + name=thread_name, + target=self.worker, + args=( + weakref.ref(self, weakref_cb), + self.work_queue, + ), + ) + t.daemon = True + t.start() + self.threads.append(t) + self.threads_queues[t] = self.work_queue + + def shutdown(self, wait=True): + with self.shutdown_lock: + self.shutdown = True + self.work_queue.put(NULL_ENTRY) + if wait: + for t in self.threads: + try: + t.join(timeout=2) + except Exception: + pass + + @staticmethod + def worker(executor_reference, work_queue): + c.new_event_loop(nest_asyncio=True) + + try: + while True: + work_item = work_queue.get(block=True) + priority = work_item[0] + + if priority == sys.maxsize: + # Wake up queue management thread. + work_queue.put(NULL_ENTRY) + break + + item = work_item[1] + + if item is not None: + item.run() + # Delete references to object. See issue16284 + del item + continue + + executor = executor_reference() + # Exit if: + # - The interpreter is shutting down OR + # - The executor that owns the worker has been collected OR + # - The executor that owns the worker has been shutdown. 
+ if executor is None or executor.shutdown: + # Flag the executor as shutting down as early as possible if it + # is not gc-ed yet. + if executor is not None: + executor.shutdown = True + # Notice other workers + work_queue.put(NULL_ENTRY) + return + del executor + except Exception as e: + e = c.detailed_error(e) + + @property + def num_tasks(self): + return self.work_queue.qsize() + + @classmethod + def as_completed(futures: list): + assert isinstance(futures, list), "futures must be a list" + return [f for f in futures if not f.done()] + + @staticmethod + def wait(futures:list) -> list: + futures = [futures] if not isinstance(futures, list) else futures + results = [] + for future in c.as_completed(futures): + results += [future.result()] + return results + + + @classmethod + def test(cls): + def fn(x): + result = x*2 + c.print(result) + return result + + self = cls() + futures = [] + for i in range(10): + futures += [self.submit(fn=fn, kwargs=dict(x=i))] + for future in c.tqdm(futures): + future.result() + for i in range(10): + futures += [self.submit(fn=fn, kwargs=dict(x=i))] + + results = c.wait(futures, timeout=10) + + while self.num_tasks > 0: + c.print(self.num_tasks, 'tasks remaining', color='red') + + + return {'success': True, 'msg': 'thread pool test passed'} + + \ No newline at end of file diff --git a/commune/history.py b/commune/history.py new file mode 100644 index 000000000..51fac6aca --- /dev/null +++ b/commune/history.py @@ -0,0 +1,57 @@ +import commune as c +import os + +class History(c.Module): + def __init__(self, folder_path='history'): + self.folder_path = self.resolve_path(folder_path) + + def set_folder_path(self, path): + self.folder_path = self.resolve_path(path) # set the folder path to the resolved path + assert os.path.isdir(self.folder_path), f"History path {self.folder_path} does not exist" # check if the path exists + c.print(f"History path: {self.folder_path}", color='green') # print the path + + def add(self, item:dict, path=None): + if 'timestamp' not in item: + item['timestamp'] = c.timestamp() + path = path or (self.folder_path + '/' + str(item['timestamp'])) + return self.put(path, item) + + def paths(self, key=None, max_age=None): + files = [] + current_timestamp = c.timestamp() + for file in c.ls(self.folder_path): + timestamp = self.get_file_timestamp(file) + if max_age and current_timestamp - timestamp > max_age: + continue + files.append(file) + return files + + def get_file_timestamp(self, file): + return int(file.split('/')[-1].split('.')[0]) + + def history_paths(self, search=None, n=1000, reverse=False): + paths = self.ls(self.folder_path) + sorted_paths = sorted(paths, reverse=reverse) + if search: + sorted_paths = [p for p in sorted_paths if search in p] + return sorted_paths[:n] + + def history(self, search=None, n=100, reverse=True, idx=None): + history_paths = self.history_paths(n=n, reverse=reverse, search=search) + history = [c.get(s) for s in history_paths] + if idx: + return history[idx] + return history + + def rm_history(self, search=None, n=100, reverse=True): + history_paths = self.history_paths(n=n, reverse=reverse, search=search) + for path in history_paths: + c.rm(path) + return history_paths + + def last_n(self, n=1): + return self.history(n=n) + + + + diff --git a/commune/modules/key/aes/aes.py b/commune/key/aes.py old mode 100755 new mode 100644 similarity index 83% rename from commune/modules/key/aes/aes.py rename to commune/key/aes.py index 9d02396c2..eff1a7f0d --- a/commune/modules/key/aes/aes.py +++ 
b/commune/key/aes.py @@ -7,10 +7,10 @@ import sys import inspect import time -import commune -class AESKey(commune.Module): +import commune as c +class AESKey(c.Module): - def __init__(self, key:str ): + def __init__(self, key:str = 'dummy' ): self.bs = AES.block_size self.key_phrase = hashlib.sha256(key.encode()).digest() @@ -19,7 +19,6 @@ def encrypt(self, data, return_string = True): data = self._pad(data) iv = Random.new().read(AES.block_size) cipher = AES.new(self.key_phrase, AES.MODE_CBC, iv) - encrypted_bytes = base64.b64encode(iv + cipher.encrypt(data.encode())) encrypted_data = encrypted_bytes.decode() if return_string else encrypted_bytes @@ -29,10 +28,7 @@ def decrypt(self, enc): enc = base64.b64decode(enc) iv = enc[:AES.block_size] cipher = AES.new(self.key_phrase, AES.MODE_CBC, iv) - try: - decrypted_data = self._unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8') - except UnicodeDecodeError as e: - raise Exception('Bro, use another password, this one aint working') + decrypted_data = self._unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8') return self.str2python(decrypted_data) def _pad(self, s): @@ -56,41 +52,36 @@ def test_encrypt_decrypt(cls, key='dummy'): ] import time for test_object in test_objects: - start_time = time.clock() + start_time = time.time() encrypted = self.encrypt(test_object) decrypted = self.decrypt(encrypted) assert decrypted == test_object, f'FAILED: {test_encrypt_decrypt} {test_object} FAILED' size_bytes = sys.getsizeof(test_object) - seconds = time.clock() - start_time + seconds = time.time() - start_time rate = size_bytes / seconds print('PASSED test_encrypt_decrypt') return True - - - - @classmethod def test_encrypt_decrypt_throughput(cls, key='dummy'): import streamlit as st self = cls(key=key) test_object = [1,2,3,5]*1000000 - start_time = time.clock() + start_time = time.time() encrypted = self.encrypt(test_object) - seconds = time.clock() - start_time + seconds = time.time() - start_time size_bytes = sys.getsizeof(test_object) encrypt_rate = size_bytes / seconds - start_time = time.clock() + start_time = time.time() decrypted = self.decrypt(encrypted) - seconds = time.clock() - start_time + seconds = time.time() - start_time size_bytes = sys.getsizeof(test_object) decrypt_rate = size_bytes / seconds - st.write(f'ENCRYPT SPEED (MB per Second): {encrypt_rate//1000}') st.write(f'DECRYPT SPEED (MB per Second): {decrypt_rate//1000}') @@ -98,19 +89,15 @@ def test_encrypt_decrypt_throughput(cls, key='dummy'): return True - - - - @classmethod def test(cls): import streamlit as st + self = cls() for attr in dir(cls): if attr[:len('test_')] == 'test_': getattr(cls, attr)() st.write('PASSED',attr) - @classmethod def streamlit(cls): import streamlit as st diff --git a/commune/key/app.py b/commune/key/app.py new file mode 100644 index 000000000..8e39ba3d2 --- /dev/null +++ b/commune/key/app.py @@ -0,0 +1,60 @@ +import commune as c +import streamlit as st + +class KeyDashboard(c.Module): + + def __init__(self, state: dict=None): + + self.keys = c.keys() + self.key2index = {k:i for i,k in enumerate(self.keys)} + + def select_key(self): + key = 'module' + key = st.selectbox('Select Key', self.keys, index=self.key2index[key]) + self.key = c.get_key(key) + if self.key.path == None: + self.key.path = key + + + st.write('Address: ', self.key.ss58_address) + + + def create_key(self, expander=False): + new_key = st.text_input('Name of Key', '', key='create') + create_key_button = st.button('Create Key') + if create_key_button and len(new_key) > 0: + 
c.add_kesy(new_key) + key = c.get_key(new_key) + + def rename_key(self): + + old_key = st.selectbox('Select Key', self.keys, index=self.key2index[self.key.path], key='select old rename key') + new_key = st.text_input('New of Key', '', key='rename') + rename_key_button = st.button('Rename Key') + if rename_key_button and len(new_key) > 0: + if c.key_exists(new_key): + st.error('Key already exists') + c.rename_key(old_key,new_key) + key = c.get_key(new_key) + + def remove_key(self): + with st.form(key='Remove Key'): + rm_keys = st.multiselect('Select Key(s) to Remove', self.keys, [], key='rm_key') + rm_key_button = st.form_submit_button('Remove Key') + if rm_key_button: + c.rm_keys(rm_keys) + + @classmethod + def dashboard(cls, *args, **kwargs): + self = cls(*args, **kwargs) + + for k in ['select', 'create', 'rename', 'remove']: + fn_name = k + '_key' + with st.expander(fn_name.capitalize().replace('_',' ')): + getattr(self, fn_name)() + + return self.key + + +KeyDashboard.run(__name__) + diff --git a/commune/key/evm.py b/commune/key/evm.py new file mode 100644 index 000000000..c2a29c98e --- /dev/null +++ b/commune/key/evm.py @@ -0,0 +1,309 @@ +# +# Copyright 2022 Ocean Protocol Foundation +# SPDX-License-Identifier: Apache-2.0 +# +import logging +import os +from typing import Dict, Optional, Union +import json +from eth_account.datastructures import SignedMessage +from eth_account.messages import SignableMessage +from hexbytes.main import HexBytes +from web3.main import Web3 +from eth_account.messages import encode_defunct +from eth_keys import keys +from copy import deepcopy +from eth_account import Account + +import commune as c +from typing import List, Dict, Union, Optional, Any + +logger = logging.getLogger(__name__) + + +class EVMAccount(c.Module, Account): + + _last_tx_count = dict() + def __init__( + self, + network:str = 'local.main', + **kwargs + ) -> None: + """Initialises EVMAccount object.""" + # assert private_key, "private_key is required." + self.config = self.set_config( kwargs=kwargs) + Account.__init__(self, **kwargs) + self.set_network(network) + + + @property + def private_key(self): + return self._private_key + + @staticmethod + def reset_tx_count() -> None: + EVMAccount._last_tx_count = dict() + + def get_nonce(self, address: str = None) -> int: + # We cannot rely on `web3.eth.get_transaction_count` because when sending multiple + # transactions in a row without wait in between the network may not get the chance to + # update the transaction count for the self address in time. + # So we have to manage this internally per self address. 
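+        # For example, two send_tx calls issued back to back from the same
+        # address should get nonces N and N+1 even if the node has not yet
+        # seen the first transaction.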
+ address = self.resolve_address(address) + if address not in EVMAccount._last_tx_count: + EVMAccount._last_tx_count[address] = self.web3.eth.get_transaction_count(address) + else: + EVMAccount._last_tx_count[address] += 1 + + return EVMAccount._last_tx_count[address] + + + def sign_tx( + self, + tx: Dict[str, Union[int, str, bytes]], + ) -> HexBytes: + if tx.get('nonce') == None: + nonce = self.get_nonce(web3=self.web3, address=self.address) + if tx.get('gasePrice') == None: + gas_price = int(self.web3.eth.gas_price * 1.1) + max_gas_price = os.getenv('ENV_MAX_GAS_PRICE', None) + if gas_price and max_gas_price: + gas_price = min(gas_price, max_gas_price) + + tx["gasPrice"] = gas_price + + + signed_tx = self.web3.eth.account.sign_transaction(tx, self.private_key) + logger.debug(f"Using gasPrice: {gas_price}") + logger.debug(f"`EVMAccount` signed tx is {signed_tx}") + return signed_tx.rawTransaction + + @property + def nonce(self): + return self.web3.eth.get_transaction_count(self.address) + + @property + def gas_prices(self): + return self.web3.eth.generate_gas_price() + + @property + def tx_metadata(self) -> Dict[str, Union[int, str, bytes]]: + ''' + Default tx metadata + ''' + + return { + 'from': self.address, + 'nonce': self.nonce, + 'gasPrice':self.gas_price, + } + def send_contract_tx(self, fn:str , value=0): + ''' + send a contract transaction for your python objecs + ''' + tx_metadata = self.tx_metadata + tx_metadata['value'] = value + tx = fn.buildTransaction(tx_metadata) + tx = self.send_tx(tx) + return tx + + def send_tx(self, tx): + ''' + Send a transaction + ''' + rawTransaction = self.sign_tx(tx=tx) + # 7. Send tx and wait for receipt + tx_hash = self.web3.eth.send_raw_transaction(rawTransaction) + tx_receipt = self.web3.eth.wait_for_transaction_receipt(tx_hash) + + return tx_receipt.__dict__ + + + def resolve_message(self, message) : + message = self.python2str(message) + if isinstance(message, str): + message = encode_defunct(text=message) + elif isinstance(message, SignableMessage): + message = message + else: + raise NotImplemented + + return message + + + def sign(self, message: Union[SignableMessage,str, dict], include_message:bool = True) -> SignedMessage: + """Sign a transaction. + Args: + message: The message to sign. + signature_only: If True, only the signature is returned. 
+ """ + signable_message = self.resolve_message(message) + + signed_message = self.sign_message(signable_message) + signed_message_dict = {} + for k in ['v', 'r', 's', 'signature', 'messageHash']: + signed_message_dict[k] = getattr(signed_message, k) + if isinstance(signed_message_dict[k], HexBytes): + signed_message_dict[k] = signed_message_dict[k].hex() + + if include_message: + signed_message_dict['message'] = message + signed_message = signed_message_dict + + + return signed_message + + @property + def public_key(self): + return self.private_key_to_public_key(self.private_key) + + + @staticmethod + def private_key_to_public_key(private_key: str) -> str: + ''' + Conert private key to public key + ''' + private_key_object = keys.PrivateKey(private_key) + return private_key_object.public_key + + + + def keys_str(self) -> str: + s = [] + s += [f"address: {self.address}"] + if self.private_key is not None: + s += [f"private key: {self.private_key}"] + s += [f"public key: {self.public_key}"] + s += [""] + return "\n".join(s) + + def resolve_web3(self, web3=None): + if web3 == None: + web3 == self.web3 + assert web3 != None + return web3 + + def resolve_address(self, address=None): + if address == None: + address = self.address + assert address != None + return address + + + def get_balance(self, token:str=None, address:str=None): + address = self.resolve_address(address) + + if token == None: + # return native token + balance = self.web3.eth.get_balance(self.address) + else: + raise NotImplemented + + return balance + + @property + def accounts(self): + return self.config.get('accounts', []) + + + @classmethod + def streamlit(cls): + import streamlit as st + st.write(f'### {cls.__name__}') + self = cls.deploy(actor={'refresh': False, 'wrap': True}) + + + def replicate(self, private_key, web3=None): + return EVMAccount(private_key=private_key, web3=self.web3) + + + def set_network(self, network:str= 'local.main') -> None: + ''' + Set network + ''' + if isinstance(network, str): + network = { + 'module': 'web3.evm.network', + 'kwargs': { + 'network': network + } + } + if network == None: + network = self.config['network'] + + # launch network + self.network = self.launch(**network) + self.web3 = self.network.web3 + + @staticmethod + def hex2str(input:HexBytes) -> str: + ''' + HexBytes to str + ''' + return input.hex() + + def recover_signer(self, message:Any, + signature:str, + vrs:Union[tuple, list]=None): + ''' + recover + ''' + + message = self.resolve_message(message) + recovered_address = Account.recover_message(message, signature=signature, vrs=vrs) + return recovered_address + + def verify(self, message:Any, signature:str = None, vrs:Union[tuple, list]=None, address:str=None) -> bool: + ''' + verify message from the signature or vrs based on the address + ''' + address = self.resolve_address(address) + recovered_address = self.recover_signer(message, signature=signature, vrs=vrs) + return bool(recovered_address == address) + + @classmethod + def test_sign(cls): + self = cls() + message = {'bro': 'bro'} + signature = self.sign(message) + assert self.verify(message, signature=signature['signature']) + + @classmethod + def test_hash(cls): + self = cls() + print(self.hash('hello world')) + + + def test(self): + self.test_sign() + self.test_hash() + self.test_recover_message() + self.test_verify_message() + + + @classmethod + def from_password(cls, password:str, salt:str='commune', prompt=False): + + from web3.auto import w3 + from Crypto.Protocol.KDF import PBKDF2 + + # Prompt the user for 
a password and salt + if prompt : + password = input("Enter password: ") + # Derive a key using PBKDF2 + key = PBKDF2(password.encode(), salt, dkLen=32, count=100000) + + # Create an account using the key + account = Account.privateKeyToAccount(key) + + # Print the account address and private key + print("Account address:", account.address) + print("Private key:", account.privateKey.hex()) + + return account + +if __name__ == '__main__': + EVMAccount.run() + + + diff --git a/commune/key/key.md b/commune/key/key.md new file mode 100644 index 000000000..bf7f6ea7f --- /dev/null +++ b/commune/key/key.md @@ -0,0 +1,110 @@ +The key is a sr25519 key that is used to sign, encrypt, decrypt and verify any string or messege. +We can also replicate the key to other chains through using the same seed to generate the other keys. This means you can have one key instead of multiple keys for each chain, which is more convient and secure. + +![Alt text](image_key.png) + +c.add_key("alice") + +key already exists at alice +{ + 'crypto_type': 1, + 'seed_hex': '518fad1043efc934a759334215ef54d48e1f8836355ed864bbb797f90ecb32b7', + 'derive_path': None, + 'path': 'alice', + 'ss58_format': 42, + 'public_key': '7cd0e327f4f6649719158892dafe766a5efd0185cb5fe17548d294f00f12661b', + 'private_key': +'943fb89150a67192919a43004f87685faba470e754fe4ff0af6a93e7fc54dc0a6cceb6fbc29d610d5486ba78969f609ea83753fb9e32d58df0c67f13 +dfcbbd68', + 'mnemonic': 'quantum belt rival casual benefit obscure sight wool pupil jaguar guide mango', + 'ss58_address': '5EtMr6n6APFay8FFdhhP9sMPwvv1Nfcm5yxiRTxviHH4WVZg' +} +Now this generates a random key and if you want to save it to a file you can do so like this. + +c.add_key("alice") + +or + +c add_key alice + + + +{ + 'crypto_type': 1, + 'seed_hex': +'518fad1043efc934a759334215ef54d48e1f8836355ed864bbb797f90ecb32b7', + 'derive_path': None, + 'path': 'alice', + 'ss58_format': 42, + 'public_key': +'7cd0e327f4f6649719158892dafe766a5efd0185cb5fe17548d294f00f12661b', + 'private_key': +'943fb89150a67192919a43004f87685faba470e754fe4ff0af6a93e7fc54dc0a6cceb6fb +c29d610d5486ba78969f609ea83753fb9e32d58df0c67f13dfcbbd68', + 'mnemonic': 'quantum belt rival casual benefit obscure sight wool +pupil jaguar guide mango', + 'ss58_address': '5EtMr6n6APFay8FFdhhP9sMPwvv1Nfcm5yxiRTxviHH4WVZg' +} + + +# Refreshing existing key + +c add_key alice refresh=True + + + +To list all the keys you can do so like this. + +c.keys("alice") + +or + +c keys alice + +[ + 'alice', +] + + +# Save Keys + +To save the keys to a file you can do so like this. + +c save_keys + + + +To sign a message you can do so like this. + +key = c.get_key("alice") +key.sign("hello world") + + +Bytes Output: + +b'\xd6RV\xf4)\x88\x9aC\x99$\xe5E\xa5N=\xcf\xf4\x7f\xc7\\\xfe\xa1V\xdd\xc0 +\xfc\x1bz:\x17\xa1$[\x84Al\xb0\xee\x0b\xedg\xc2\xe7\x93\x00\xf1~}\xd2r;\x +f2\xb4.\x90\xf2k\xd1\x10\xd9\xd5\x8f\x9d\x85' + +Dictionary Output: + +{"data":"hello world","signature":"0x7e7","public_key":"0x7cd0e327f4f6649719158892dafe766a5efd0185cb5fe17548d294f00f12661b"} + +String Output: + +This cotains the data and signature joined by a seperator. 
+ +{DATA}{SEPERATOR}{SIGNATURE} + + +key2balance map + + +c key2balance + + +key2stake map + +c key2stake + + diff --git a/commune/key/key.py b/commune/key/key.py new file mode 100644 index 000000000..296d75b44 --- /dev/null +++ b/commune/key/key.py @@ -0,0 +1,1590 @@ + +import json +from scalecodec.utils.ss58 import ss58_encode, ss58_decode, get_ss58_format +from scalecodec.base import ScaleBytes +from typing import Union, Optional +import time +import binascii +import re +import secrets +from base64 import b64encode + +import nacl.bindings +import nacl.public +from eth_keys.datatypes import PrivateKey +from substrateinterface.utils import ss58 + +from substrateinterface.constants import DEV_PHRASE +from substrateinterface.exceptions import ConfigurationError +from substrateinterface.key import extract_derive_path +from substrateinterface.utils.ecdsa_helpers import mnemonic_to_ecdsa_private_key, ecdsa_verify, ecdsa_sign +from substrateinterface.utils.encrypted_json import decode_pair_from_encrypted_json, encode_pair + +from bip39 import bip39_to_mini_secret, bip39_generate, bip39_validate +import sr25519 +import ed25519_zebra +import commune as c + +__all__ = ['Keypair', 'KeypairType', 'MnemonicLanguageCode'] + + +class KeypairType: + """ + Type of cryptography, used in `Keypair` instance to encrypt and sign data + + * ED25519 = 0 + * SR25519 = 1 + * ECDSA = 2 + + """ + ED25519 = 0 + SR25519 = 1 + ECDSA = 2 + + +class MnemonicLanguageCode: + """ + Available language codes to generate mnemonics + + * ENGLISH = 'en' + * CHINESE_SIMPLIFIED = 'zh-hans' + * CHINESE_TRADITIONAL = 'zh-hant' + * FRENCH = 'fr' + * ITALIAN = 'it' + * JAPANESE = 'ja' + * KOREAN = 'ko' + * SPANISH = 'es' + + """ + ENGLISH = 'en' + CHINESE_SIMPLIFIED = 'zh-hans' + CHINESE_TRADITIONAL = 'zh-hant' + FRENCH = 'fr' + ITALIAN = 'it' + JAPANESE = 'ja' + KOREAN = 'ko' + SPANISH = 'es' + + +class Keypair(c.Module): + keys_path = c.data_path + '/keys.json' + def __init__(self, + ss58_address: str = None, + public_key: Union[bytes, str] = None, + private_key: Union[bytes, str] = None, + ss58_format: int = 42, + seed_hex: Union[str, bytes] = None, + crypto_type: int = KeypairType.SR25519, + derive_path: str = None, + mnemonic: str = None, + path:str = None, + **kwargs + ): + """ + Allows generation of Keypairs from a variety of input combination, such as a public/private key combination, + mnemonic or URI containing soft and hard derivation paths. 
With these Keypairs data can be signed and verified + + Parameters + ---------- + ss58_address: Substrate address + public_key: hex string or bytes of public_key key + private_key: hex string or bytes of private key + ss58_format: Substrate address format, default to 42 when omitted + seed_hex: hex string of seed + crypto_type: Use KeypairType.SR25519 or KeypairType.ED25519 cryptography for generating the Keypair + """ + + # If no arguments are provided, generate a random keypair + if ss58_address == None \ + and public_key == None \ + and private_key == None \ + and seed_hex == None \ + and mnemonic == None: + + key = self.new_key() + seed_hex = key.__dict__.get('seed_hex', seed_hex) + private_key = key.__dict__.get('private_key', private_key) + crypto_type = key.__dict__.get('crypto_type', crypto_type) + derive_path = key.__dict__.get('derive_path', derive_path) + ss58_address = key.__dict__.get('ss58_address', ss58_address) + path = key.__dict__.get('path', path) + public_key = key.__dict__.get('public_key', public_key) + ss58_format = key.__dict__.get('ss58_format', ss58_format) + mnemonic = key.__dict__.get('mnemonic', mnemonic) + + + + self.crypto_type = crypto_type + self.seed_hex = seed_hex + self.derive_path = None + self.path = path + self.ss58_format = ss58_format + + + if crypto_type != KeypairType.ECDSA and ss58_address and not public_key: + public_key = ss58_decode(ss58_address, valid_ss58_format=ss58_format) + + if private_key: + + if type(private_key) == str: + private_key = c.str2bytes(private_key) + + if self.crypto_type == KeypairType.SR25519: + if len(private_key) != 64: + raise ValueError('Secret key should be 64 bytes long') + if not public_key: + public_key = sr25519.public_from_secret_key(private_key) + + if self.crypto_type == KeypairType.ECDSA: + private_key_obj = PrivateKey(private_key) + public_key = private_key_obj.public_key.to_address() + ss58_address = private_key_obj.public_key.to_checksum_address() + + if not public_key: + raise ValueError('No SS58 formatted address or public key provided') + + if type(public_key) is str: + public_key = bytes.fromhex(public_key.replace('0x', '')) + + if crypto_type == KeypairType.ECDSA: + if len(public_key) != 20: + raise ValueError('Public key should be 20 bytes long') + else: + if len(public_key) != 32: + raise ValueError('Public key should be 32 bytes long') + + if not ss58_address: + ss58_address = ss58_encode(public_key, ss58_format=ss58_format) + + self.public_key: bytes = public_key + + self.ss58_address: str = ss58_address + + self.private_key: bytes = private_key + + self.mnemonic = mnemonic + + @classmethod + def add_key(cls, path:str, mnemonic:str = None, password:str=None, refresh:bool=False, private_key=None, **kwargs): + + if cls.key_exists(path) and not refresh : + c.print(f'key already exists at {path}', color='red') + return json.loads(cls.get(path)) + c.print(f'generating key {path}') + key = cls.new_key(mnemonic=mnemonic, private_key=private_key, **kwargs) + key.path = path + key_json = key.to_json() + if password != None: + key_json = cls.encrypt(data=key_json, password=password) + cls.put(path, key_json) + cls.update() + return json.loads(key_json) + + + @classmethod + def update(cls, **kwargs): + return cls.key2address(update=True,**kwargs) + + @classmethod + def rename_key(self, new_path): + return self.mv_key(self.path, new_path) + + @classmethod + def mv_key(cls, path, new_path): + + assert cls.key_exists(path), f'key does not exist at {path}' + cls.put(new_path, cls.get_key(path).to_json()) + 
cls.rm_key(path) + assert cls.key_exists(new_path), f'key does not exist at {new_path}' + new_key = cls.get_key(new_path) + return {'success': True, 'from': path , 'to': new_path, 'key': new_key} + + + rename_key = mv_key + + @classmethod + def switch_keys(cls, path1:str, path2:str): + + assert path1 != path2 + assert cls.key_exists(path1), f'key does not exist at {path1}' + assert cls.key_exists(path2), f'key does not exist at {path2}' + + before = { + path1: cls.key2address(path1), + path2: cls.key2address(path2) + } + + + key1 = c.get_key(path1) + key2 = c.get_key(path2) + cls.put(path1, key2.to_json()) + cls.put(path2, key1.to_json()) + + + after = { + path1 : cls.key2address(path1), + path2 : cls.key2address(path2) + } + + assert before[path1] == after[path2] + assert before[path2] == after[path1] + + return {'success': True, 'before': before, 'after': after, 'msg': f'switched {path1} and {path2}'} + + swap_keys = switch_keys + @classmethod + def add_keys(cls, name, n=100, verbose:bool = False, **kwargs): + response = [] + for i in range(n): + key_name = f'{name}.{i}' + if bool == True: + c.print(f'generating key {key_name}') + response.append(cls.add_key(key_name, **kwargs)) + + return response + + @classmethod + def key_info(cls, path='module', create_if_not_exists=False, **kwargs): + kwargs['json'] = True + return cls.get_key(path, create_if_not_exists=create_if_not_exists, **kwargs) + + @classmethod + def key_info_map(cls, *args, **kwargs): + return {key: cls.key_info(key) for key in cls.keys(*args, **kwargs)} + + @classmethod + def load_key(cls, path=None): + key_info = cls.get(path) + key_info = c.jload(key_info) + if key_info['path'] == None: + key_info['path'] = path.replace('.json', '').split('/')[-1] + + cls.add_key(**key_info) + return {'status': 'success', 'message': f'key loaded from {path}'} + + + @classmethod + def load_keys(cls, path=keys_path, verbose:bool = False, refresh:bool = False, **kwargs): + return cls.load_mems(path, verbose=verbose, refresh=refresh, **kwargs) + + @classmethod + def save_keys(cls, path='saved_keys.json', **kwargs): + path = cls.resolve_path(path) + c.print(f'saving mems to {path}') + mems = cls.mems() + c.put_json(path, mems) + return {'saved_mems':list(mems.keys()), 'path':path} + + savemems = savekeys = save_keys + + @classmethod + def load_keys(cls, path='saved_keys.json', refresh=False, **kwargs): + + """""" + mems = c.get_json(path) + for k,mem in mems.items(): + try: + cls.add_key(k, mnemonic=mem, refresh=refresh, **kwargs) + except Exception as e: + c.print(f'failed to load mem {k} due to {e}', color='red') + return {'loaded_mems':list(mems.keys()), 'path':path} + loadkeys = loadmems = load_keys + + + @classmethod + def mems(cls, search=None): + mems = {} + for key in cls.keys(search): + try: + mems[key] = cls.getmem(key) + except Exception as e: + c.print(f'failed to get mem for {key} due to {e}', color='red') + if search: + mems = {k:v for k,v in mems.items() if search in k or search in v} + return mems + + mnemonics = mems + + + @classmethod + def get_key(cls, + path:str, + password:str=None, + json:bool=False, + create_if_not_exists:bool = True, + **kwargs): + if cls.key_exists(path) == False: + if create_if_not_exists == True: + key = cls.add_key(path, **kwargs) + c.print(f'key does not exist, generating new key -> {key["ss58_address"]}') + else: + raise ValueError(f'key does not exist at --> {path}') + + key_json = cls.get(path) + + # if key is encrypted, decrypt it + if c.is_encrypted(key_json): + key_json = 
c.decrypt(data=key_json, password=password) + if key_json == None: + c.print({'status': 'error', 'message': f'key is encrypted, please {path} provide password'}, color='red') + return None + + + if isinstance(key_json, str): + key_json = c.jload(key_json) + + + if json: + key_json['path'] = path + return key_json + else: + return cls.from_json(key_json) + + + + @classmethod + def get_keys(cls, search=None, clean_failed_keys=False): + keys = {} + for key in cls.keys(): + if str(search) in key or search == None: + try: + keys[key] = cls.get_key(key) + except Exception as e: + c.print(f'failed to get key {key} due to {e}', color='red') + continue + if keys[key] == None: + if clean_failed_keys: + cls.rm_key(key) + keys.pop(key) + + return keys + + + @classmethod + def key2address(cls, search=None, max_age=None, update=False, **kwargs): + path = 'key2address' + key2address = [] + key2address = cls.get(path, key2address,max_age=max_age, update=update) + if len(key2address) == 0: + key2address = { k: v.ss58_address for k,v in cls.get_keys(search).items()} + cls.put(path, key2address) + if search != None: + key2address = {k:v for k,v in key2address.items() if search in k} + + return key2address + + @classmethod + def address2key(cls, search:Optional[str]=None, update:bool=False): + address2key = { v: k for k,v in cls.key2address(update=update).items()} + if search != None : + return address2key.get(search, None) + return address2key + + @classmethod + def get_address(cls, key): + return cls.key2address()[key] + get_addy = get_address + @classmethod + def has_address(cls, address): + return address in cls.address2key() + + @classmethod + def get_key_for_address(cls, address, ): + return cls.address2key().get(address) + + key_storage_path = c.repo_path + + + @classmethod + def key_paths(cls): + return cls.ls() + + @classmethod + def key2path(cls) -> dict: + """ + defines the path for each key + """ + path2key_fn = lambda path: '.'.join(path.split('/')[-1].split('.')[:-1]) + key2path = {path2key_fn(path):path for path in cls.key_paths()} + return key2path + + @classmethod + def keys(cls, search : str = None, **kwargs): + keys = list(cls.key2path().keys()) + if search != None: + keys = [key for key in keys if search in key] + return keys + + @classmethod + def n(cls, *args, **kwargs): + return len(cls.key2address(*args, **kwargs)) + + @classmethod + def key_exists(cls, key, **kwargs): + key_exists = key in cls.keys(**kwargs) + if not key_exists: + addresses = list(cls.key2address().values()) + if key in addresses: + key_exists = True + return key_exists + + + @classmethod + def get_key_address(cls, key): + return cls.get_key(key).ss58_address + + + @classmethod + def rm_key(cls, key=None): + + key2path = cls.key2path() + keys = list(key2path.keys()) + if key not in keys: + raise Exception(f'key {key} not found, available keys: {keys}') + c.rm(key2path[key]) + cls.update() + assert c.exists(key2path[key]) == False, 'key not deleted' + + return {'deleted':[key]} + + @property + def crypto_type_name(self): + return self.crypto_type2name(self.crypto_type) + + @classmethod + def rm_keys(cls, rm_keys, verbose:bool=False): + + if isinstance(rm_keys, str): + rm_keys = cls.keys(rm_keys) + + assert isinstance(rm_keys, list), f'rm_keys must be list, got {type(rm_keys)}' + + for rm_key in rm_keys: + cls.rm_key(rm_key) + + return {'removed_keys':rm_keys} + + @classmethod + def rm_all_keys(cls): + return cls.rm_keys(cls.keys()) + + crypto_types = ['ED25519', 'SR25519', 'ECDSA'] + + @classmethod + def 
crypto_type_map(cls): + crypto_type_map = {k:v for k,v in KeypairType.__dict__.items() if k in cls.crypto_types } + return crypto_type_map + + @classmethod + def crypto_name2type(cls, name:str): + crypto_type_map = cls.crypto_type_map() + name = name.upper() + for k,v in crypto_type_map.items(): + if k.startswith(name.upper()): + return v + return crypto_type_map[name.upper()] + + @classmethod + def crypto_type2name(cls, crypto_type:str): + crypto_type_map ={v:k for k,v in cls.crypto_type_map().items()} + return crypto_type_map[crypto_type] + + + @classmethod + def resolve_crypto_type(cls, crypto_type): + + if isinstance(crypto_type, str): + crypto_type = crypto_type.upper() + crypto_type = cls.crypto_name2type(crypto_type) + elif isinstance(crypto_type, int): + assert crypto_type in list(KeypairType.__dict__.values()), f'crypto_type {crypto_type} not supported' + + assert crypto_type in list(KeypairType.__dict__.values()), f'crypto_type {crypto_type} not supported' + return crypto_type + + @classmethod + def new_key(cls, + mnemonic:str = None, + suri:str = None, + private_key: str = None, + crypto_type: Union[int,str] = 'sr25519', + json: bool = False, + verbose:bool=False, + **kwargs): + ''' + yo rody, this is a class method you can gen keys whenever fam + ''' + mnemonic = kwargs.pop('m', mnemonic) + + if verbose: + c.print(f'generating {crypto_type} keypair, {suri}', color='green') + + crypto_type = cls.resolve_crypto_type(crypto_type) + + if suri: + key = cls.create_from_uri(suri, crypto_type=crypto_type) + elif mnemonic: + key = cls.create_from_mnemonic(mnemonic, crypto_type=crypto_type) + elif private_key: + key = cls.create_from_private_key(private_key,crypto_type=crypto_type) + else: + mnemonic = cls.generate_mnemonic() + key = cls.create_from_mnemonic(mnemonic, crypto_type=crypto_type) + if json: + return key.to_json() + + return key + + create = gen = new_key + + + + def to_json(self, password: str = None ) -> dict: + state_dict = self.copy(self.__dict__) + for k,v in state_dict.items(): + if type(v) in [bytes]: + state_dict[k] = v.hex() + if password != None: + state_dict[k] = self.encrypt(data=state_dict[k], password=password) + if '_ss58_address' in state_dict: + state_dict['ss58_address'] = state_dict.pop('_ss58_address') + state_dict = json.dumps(state_dict) + + return state_dict + + @classmethod + def from_json(cls, obj: Union[str, dict], password: str = None) -> dict: + if type(obj) == str: + obj = json.loads(obj) + if obj == None: + return None + + for k,v in obj.items(): + if c.is_encrypted(obj[k]) and password != None: + obj[k] = cls.decrypt(data=obj[k], password=password) + if 'ss58_address' in obj: + obj['_ss58_address'] = obj.pop('ss58_address') + return cls(**obj) + + @classmethod + def sand(cls): + + for k in cls.new_key(suri=2): + + password = 'fam' + enc = cls.encrypt(k, password=password) + dec = cls.decrypt(enc, password='bro ') + + + + + + @classmethod + def generate_mnemonic(cls, words: int = 12, language_code: str = MnemonicLanguageCode.ENGLISH) -> str: + """ + Generates a new seed phrase with given amount of words (default 12) + + Parameters + ---------- + words: The amount of words to generate, valid values are 12, 15, 18, 21 and 24 + language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. 
Defaults to `MnemonicLanguageCode.ENGLISH` + + Returns + ------- + str: Seed phrase + """ + return bip39_generate(words, language_code) + + @classmethod + def validate_mnemonic(cls, mnemonic: str, language_code: str = MnemonicLanguageCode.ENGLISH) -> bool: + """ + Verify if specified mnemonic is valid + + Parameters + ---------- + mnemonic: Seed phrase + language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. Defaults to `MnemonicLanguageCode.ENGLISH` + + Returns + ------- + bool + """ + return bip39_validate(mnemonic, language_code) + + + # def resolve_crypto_type() + @classmethod + def create_from_mnemonic(cls, mnemonic: str = None, ss58_format=42, crypto_type=KeypairType.SR25519, + language_code: str = MnemonicLanguageCode.ENGLISH, return_kwargs:bool = False) -> 'Keypair': + """ + Create a Keypair for given memonic + + Parameters + ---------- + mnemonic: Seed phrase + ss58_format: Substrate address format + crypto_type: Use `KeypairType.SR25519` or `KeypairType.ED25519` cryptography for generating the Keypair + language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. Defaults to `MnemonicLanguageCode.ENGLISH` + + Returns + ------- + Keypair + """ + if not mnemonic: + mnemonic = cls.generate_mnemonic(language_code=language_code) + + if crypto_type == KeypairType.ECDSA: + if language_code != MnemonicLanguageCode.ENGLISH: + raise ValueError("ECDSA mnemonic only supports english") + + private_key = mnemonic_to_ecdsa_private_key(mnemonic) + keypair = cls.create_from_private_key(private_key, ss58_format=ss58_format, crypto_type=crypto_type) + + else: + seed_array = bip39_to_mini_secret(mnemonic, "", language_code) + + keypair = cls.create_from_seed( + seed_hex=binascii.hexlify(bytearray(seed_array)).decode("ascii"), + ss58_format=ss58_format, + crypto_type=crypto_type, + return_kwargs=return_kwargs + ) + + if return_kwargs: + kwargs = keypair + return kwargs + + + keypair.mnemonic = mnemonic + + + + return keypair + + from_mnemonic = from_mem = create_from_mnemonic + + @classmethod + def create_from_seed( + cls, + seed_hex: Union[bytes, str], + ss58_format: Optional[int] = 42, + crypto_type=KeypairType.SR25519, + return_kwargs:bool = False + + ) -> 'Keypair': + """ + Create a Keypair for given seed + + Parameters + ---------- + seed_hex: hex string of seed + ss58_format: Substrate address format + crypto_type: Use KeypairType.SR25519 or KeypairType.ED25519 cryptography for generating the Keypair + + Returns + ------- + Keypair + """ + + + if type(seed_hex) is str: + seed_hex = bytes.fromhex(seed_hex.replace('0x', '')) + + if crypto_type == KeypairType.SR25519: + public_key, private_key = sr25519.pair_from_seed(seed_hex) + elif crypto_type == KeypairType.ED25519: + private_key, public_key = ed25519_zebra.ed_from_seed(seed_hex) + else: + raise ValueError('crypto_type "{}" not supported'.format(crypto_type)) + + ss58_address = ss58_encode(public_key, ss58_format) + + + kwargs = dict( + ss58_address=ss58_address, public_key=public_key, private_key=private_key, + ss58_format=ss58_format, crypto_type=crypto_type, seed_hex=seed_hex + ) + + if return_kwargs: + return kwargs + else: + return cls(**kwargs) + @classmethod + def from_password(cls, password:str, **kwargs): + return cls.create_from_uri(password, **kwargs) + + pwd2key = password2key = from_password + + + @classmethod + def from_uri( + cls, + suri: str, + ss58_format: Optional[int] = 42, + crypto_type=KeypairType.SR25519, + 
language_code: str = MnemonicLanguageCode.ENGLISH + ) -> 'Keypair': + """ + Creates Keypair for specified suri in following format: `[mnemonic]/[soft-path]//[hard-path]` + + Parameters + ---------- + suri: + ss58_format: Substrate address format + crypto_type: Use KeypairType.SR25519 or KeypairType.ED25519 cryptography for generating the Keypair + language_code: The language to use, valid values are: 'en', 'zh-hans', 'zh-hant', 'fr', 'it', 'ja', 'ko', 'es'. Defaults to `MnemonicLanguageCode.ENGLISH` + + Returns + ------- + Keypair + """ + crypto_type = cls.resolve_crypto_type(crypto_type) + suri = str(suri) + if not suri.startswith('//'): + suri = '//' + suri + + if suri and suri.startswith('/'): + suri = DEV_PHRASE + suri + + suri_regex = re.match(r'^(?P.[^/]+( .[^/]+)*)(?P(//?[^/]+)*)(///(?P.*))?$', suri) + + suri_parts = suri_regex.groupdict() + + if crypto_type == KeypairType.ECDSA: + if language_code != MnemonicLanguageCode.ENGLISH: + raise ValueError("ECDSA mnemonic only supports english") + + private_key = mnemonic_to_ecdsa_private_key( + mnemonic=suri_parts['phrase'], + str_derivation_path=suri_parts['path'][1:], + passphrase=suri_parts['password'] or '' + ) + derived_keypair = cls.create_from_private_key(private_key, ss58_format=ss58_format, crypto_type=crypto_type) + else: + + if suri_parts['password']: + raise NotImplementedError(f"Passwords in suri not supported for crypto_type '{crypto_type}'") + + derived_keypair = cls.create_from_mnemonic( + suri_parts['phrase'], ss58_format=ss58_format, crypto_type=crypto_type, language_code=language_code + ) + + if suri_parts['path'] != '': + + derived_keypair.derive_path = suri_parts['path'] + + if crypto_type not in [KeypairType.SR25519]: + raise NotImplementedError('Derivation paths for this crypto type not supported') + + derive_junctions = extract_derive_path(suri_parts['path']) + + child_pubkey = derived_keypair.public_key + child_privkey = derived_keypair.private_key + + for junction in derive_junctions: + + if junction.is_hard: + + _, child_pubkey, child_privkey = sr25519.hard_derive_keypair( + (junction.chain_code, child_pubkey, child_privkey), + b'' + ) + + else: + + _, child_pubkey, child_privkey = sr25519.derive_keypair( + (junction.chain_code, child_pubkey, child_privkey), + b'' + ) + + derived_keypair = Keypair(public_key=child_pubkey, private_key=child_privkey, ss58_format=ss58_format) + + return derived_keypair + create_from_uri = from_uri + from_mnem = from_mnemonic = create_from_mnemonic + @classmethod + def create_from_private_key( + cls, private_key: Union[bytes, str], public_key: Union[bytes, str] = None, ss58_address: str = None, + ss58_format: int = 42, crypto_type: int = KeypairType.SR25519 + ) -> 'Keypair': + """ + Creates Keypair for specified public/private keys + Parameters + ---------- + private_key: hex string or bytes of private key + public_key: hex string or bytes of public key + ss58_address: Substrate address + ss58_format: Substrate address format, default = 42 + crypto_type: Use KeypairType.[SR25519|ED25519|ECDSA] cryptography for generating the Keypair + + Returns + ------- + Keypair + """ + + return cls( + ss58_address=ss58_address, public_key=public_key, private_key=private_key, + ss58_format=ss58_format, crypto_type=crypto_type + ) + from_private_key = create_from_private_key + + + @classmethod + def create_from_encrypted_json(cls, json_data: Union[str, dict], passphrase: str, + ss58_format: int = None) -> 'Keypair': + """ + Create a Keypair from a PolkadotJS format encrypted JSON file + + 
Parameters + ---------- + json_data: Dict or JSON string containing PolkadotJS export format + passphrase: Used to encrypt the keypair + ss58_format: Which network ID to use to format the SS58 address (42 for testnet) + + Returns + ------- + Keypair + """ + + if type(json_data) is str: + json_data = json.loads(json_data) + + private_key, public_key = decode_pair_from_encrypted_json(json_data, passphrase) + + if 'sr25519' in json_data['encoding']['content']: + crypto_type = KeypairType.SR25519 + elif 'ed25519' in json_data['encoding']['content']: + crypto_type = KeypairType.ED25519 + # Strip the nonce part of the private key + private_key = private_key[0:32] + else: + raise NotImplementedError("Unknown KeypairType found in JSON") + + if ss58_format is None and 'address' in json_data: + ss58_format = get_ss58_format(json_data['address']) + + return cls.create_from_private_key(private_key, public_key, ss58_format=ss58_format, crypto_type=crypto_type) + + def export_to_encrypted_json(self, passphrase: str, name: str = None) -> dict: + """ + Export Keypair to PolkadotJS format encrypted JSON file + + Parameters + ---------- + passphrase: Used to encrypt the keypair + name: Display name of Keypair used + + Returns + ------- + dict + """ + if not name: + name = self.ss58_address + + if self.crypto_type != KeypairType.SR25519: + raise NotImplementedError(f"Cannot create JSON for crypto_type '{self.crypto_type}'") + + # Secret key from PolkadotJS is an Ed25519 expanded secret key, so has to be converted + # https://github.com/polkadot-js/wasm/blob/master/packages/wasm-crypto/src/rs/sr25519.rs#L125 + converted_private_key = sr25519.convert_secret_key_to_ed25519(self.private_key) + + encoded = encode_pair(self.public_key, converted_private_key, passphrase) + + json_data = { + "encoded": b64encode(encoded).decode(), + "encoding": {"content": ["pkcs8", "sr25519"], "type": ["scrypt", "xsalsa20-poly1305"], "version": "3"}, + "address": self.ss58_address, + "meta": { + "name": name, "tags": [], "whenCreated": int(time.time()) + } + } + + return json_data + + seperator = "" + + def sign(self, + data: Union[ScaleBytes, bytes, str], + return_json:bool=False, + return_string = False, + seperator = seperator + ) -> bytes: + """ + Creates a signature for given data + + Parameters + ---------- + data: data to sign in `Scalebytes`, bytes or hex string format + + Returns + ------- + signature in bytes + + """ + if not isinstance(data, str): + data = c.python2str(data) + if type(data) is ScaleBytes: + data = bytes(data.data) + elif data[0:2] == '0x': + data = bytes.fromhex(data[2:]) + elif type(data) is str: + data = data.encode() + + if not self.private_key: + raise ConfigurationError('No private key set to create signatures') + + if self.crypto_type == KeypairType.SR25519: + signature = sr25519.sign((self.public_key, self.private_key), data) + + elif self.crypto_type == KeypairType.ED25519: + signature = ed25519_zebra.ed_sign(self.private_key, data) + + elif self.crypto_type == KeypairType.ECDSA: + signature = ecdsa_sign(self.private_key, data) + + else: + raise ConfigurationError("Crypto type not supported") + + if return_json: + return { + 'data': data.decode(), + 'crypto_type': self.crypto_type, + 'signature': signature.hex(), + 'address': self.ss58_address, + } + + if return_string: + return f'{data.decode()}{seperator}{signature.hex()}' + return signature + + def ticket2address(self, ticket, **kwargs): + return self.verify(ticket, **kwargs) + + def signature2address(self, sig, **kwargs): + return 
self.verify(sig, return_address=True, **kwargs) + sig2addy = signature2address + + + def verify(self, + data: Union[ScaleBytes, bytes, str, dict], + signature: Union[bytes, str] = None, + public_key:Optional[str]= None, + return_address = False, + seperator = seperator, + ss58_format = 42, + max_age = None, + address = None, + **kwargs + ) -> bool: + """ + Verifies data with specified signature + + Parameters + ---------- + data: data to be verified in `Scalebytes`, bytes or hex string format + signature: signature in bytes or hex string format + public_key: public key in bytes or hex string format + + Returns + ------- + True if data is signed with this Keypair, otherwise False + """ + if isinstance(data, str) and seperator in data: + data, signature = data.split(seperator) + + if max_age != None: + staleness = c.timestamp() - int(data) + assert staleness < max_age, f'data is too old, {staleness} seconds old, max_age is {max_age}' + + data = c.copy(data) + + if isinstance(data, dict): + + signature = data.pop('signature') + public_key = c.ss58_decode(data['address']) + if 'data' in data: + data = data.pop('data') + + if not isinstance(data, str): + data = c.python2str(data) + + if address != None: + public_key = c.ss58_decode(address) + if public_key == None: + public_key = public_key or self.public_key + else: + if self.is_ss58(public_key): + public_key = c.ss58_decode(public_key) + + if isinstance(public_key, str): + public_key = bytes.fromhex(public_key.replace('0x', '')) + + if type(data) is ScaleBytes: + data = bytes(data.data) + elif data[0:2] == '0x': + data = bytes.fromhex(data[2:]) + elif type(data) is str: + data = data.encode() + + if type(signature) is str and signature[0:2] == '0x': + signature = bytes.fromhex(signature[2:]) + elif type(signature) is str: + signature = bytes.fromhex(signature) + if type(signature) is not bytes: + raise TypeError("Signature should be of type bytes or a hex-string") + + if self.crypto_type == KeypairType.SR25519: + crypto_verify_fn = sr25519.verify + elif self.crypto_type == KeypairType.ED25519: + crypto_verify_fn = ed25519_zebra.ed_verify + elif self.crypto_type == KeypairType.ECDSA: + crypto_verify_fn = ecdsa_verify + else: + raise ConfigurationError("Crypto type not supported") + + verified = crypto_verify_fn(signature, data, public_key) + + if not verified: + # Another attempt with the data wrapped, as discussed in https://github.com/polkadot-js/extension/pull/743 + # Note: As Python apps are trusted sources on its own, no need to wrap data when signing from this lib + verified = crypto_verify_fn(signature, b'' + data + b'', public_key) + + if return_address: + return ss58_encode(public_key, ss58_format=ss58_format) + return verified + + + + + + + @property + def encryption_key(self): + password = None + for k in ['private_key', 'mnemonic', 'sed_hex']: + if hasattr(self, k): + v = getattr(self, k) + if type(v) in [bytes]: + v = v.hex() + assert type(v) is str, f"Encryption key should be a string, not {type(v)}" + + assert password is not None, "No encryption key found, please make sure you have set either private_key, mnemonic or seed_hex" + + return password + + + + @property + def aes_key(self): + if not hasattr(self, '_aes_key'): + password = self.mnemonic or self.private_key + self._aes_key = c.module('key.aes')(c.bytes2str(password)) + return self._aes_key + + def encrypt(self, data: Union[str, bytes], password: str = None, **kwargs) -> bytes: + aes_key = self.resolve_aes_key(password) + return aes_key.encrypt(data, **kwargs) + + def 
resolve_aes_key(self, password = None): + if password != None: + key = Keypair.from_password(password) + else: + key = self + return key.aes_key + + def decrypt(self, data: Union[str, bytes], password=None, **kwargs) -> bytes: + aes_key = self.resolve_aes_key(password) + data = aes_key.decrypt(data) + return data + + def encrypt_message( + self, message: Union[bytes, str], recipient_public_key: bytes, nonce: bytes = secrets.token_bytes(24), + ) -> bytes: + """ + Encrypts message for the specified recipient + + Parameters + ---------- + message: message to be encrypted, bytes or string + recipient_public_key: recipient's public key + nonce: the nonce to use in the encryption + + Returns + ------- + Encrypted message + """ + + if not self.private_key: + raise ConfigurationError('No private key set to encrypt') + if self.crypto_type != KeypairType.ED25519: + raise ConfigurationError('Only ed25519 keypair type supported') + + + curve25519_public_key = nacl.bindings.crypto_sign_ed25519_pk_to_curve25519(recipient_public_key) + recipient = nacl.public.PublicKey(curve25519_public_key) + private_key = nacl.bindings.crypto_sign_ed25519_sk_to_curve25519(self.private_key + self.public_key) + sender = nacl.public.PrivateKey(private_key) + box = nacl.public.Box(sender, recipient) + return box.encrypt(message if isinstance(message, bytes) else message.encode("utf-8"), nonce) + + def decrypt_message(self, encrypted_message_with_nonce: bytes, sender_public_key: bytes) -> bytes: + """ + Decrypts message from a specified sender + + Parameters + ---------- + encrypted_message_with_nonce: message to be decrypted + sender_public_key: sender's public key + + Returns + ------- + Decrypted message + """ + + if not self.private_key: + raise ConfigurationError('No private key set to decrypt') + if self.crypto_type != KeypairType.ED25519: + raise ConfigurationError('Only ed25519 keypair type supported') + private_key = nacl.bindings.crypto_sign_ed25519_sk_to_curve25519(self.private_key + self.public_key) + recipient = nacl.public.PrivateKey(private_key) + curve25519_public_key = nacl.bindings.crypto_sign_ed25519_pk_to_curve25519(sender_public_key) + sender = nacl.public.PublicKey(curve25519_public_key) + return nacl.public.Box(recipient, sender).decrypt(encrypted_message_with_nonce) + + @classmethod + def sandbox(cls): + key = cls.create_from_uri('//Alice') + c.print(c.module('bittensor').get_balance(key.ss58_address)) + + @classmethod + def test(cls): + self = cls.create_from_uri('//Alice') + module_fns = c.fns() + test_fns = [fn for fn in dir(self) if fn.startswith('test_') and fn not in module_fns ] + num_tests = len(test_fns) + results = {} + for i, fn in enumerate(test_fns): + try: + result = getattr(self, fn)() + except Exception as e: + result = c.detailed_error(e) + c.print(f'Failed ({i+1}/{num_tests}) {fn} due to {e}', color='red') + results[fn] = result + return {'success':True, 'msg': 'all tests passed', 'results':results} + @classmethod + def is_key(cls, key) -> bool: + return isinstance(key, Keypair) + + def test_signing(self): + sig = self.sign('test') + assert self.verify('test',sig, bytes.fromhex(self.public_key.hex())) + assert self.verify('test',sig, self.public_key) + sig = self.sign('test', return_string=True) + assert self.verify(sig, self.public_key) + return {'success':True} + + encrypted_prefix = 'ENCRYPTED::' + + @classmethod + def encrypt_key(cls, path = 'test.enc', password=None): + assert cls.exists(path), f'file {path} does not exist' + if password == None: + password =
cls.generate_mnemonic() + data = cls.get(path) + enc_text = c.encrypt(data, password=password) + enc_text = f'{cls.encrypted_prefix}{enc_text}' + cls.put(path, enc_text) + return {'encrypted':enc_text, 'path':path , 'password':password} + + + @classmethod + def is_key_encrypted(cls, path, data=None): + data = data or cls.get(path) + if not isinstance(data, str): + return False + return data.startswith(cls.encrypted_prefix) + + + @classmethod + def test_key_encryption(cls, password='1234'): + path = 'test.enc' + c.add_key('test.enc', refresh=True) + assert cls.is_key_encrypted(path) == False, f'file {path} is encrypted' + cls.encrypt_key(path, password=password) + assert cls.is_key_encrypted(path) == True, f'file {path} is not encrypted' + cls.decrypt_key(path, password=password) + assert cls.is_key_encrypted(path) == False, f'file {path} is encrypted' + cls.rm(path) + assert not c.exists(path), f'file {path} not deleted' + return {'success': True, 'msg': 'test_key_encryption passed'} + + @classmethod + def decrypt_key(cls, path = 'test.enc', password=None): + + data = cls.get(path) + assert data.startswith(cls.encrypted_prefix), f'file {path} is not encrypted' + data = data[len(cls.encrypted_prefix):] + enc_text = c.decrypt(data, password=password) + cls.put(path, enc_text) + return {'encrypted':enc_text, 'path':path , 'password':password} + + + + def encrypt_file(self, path='test.encryption', password=None, prefix=encrypted_prefix): + if password == None: + password = self.private_key + text = c.get_text(path) + enc_text = self.encrypt(text, password=password) + enc_text = f'{prefix}{enc_text}' + c.put_text(path, enc_text) + return {'encrypted':enc_text, 'path':path } + + + + + def decrypt_file(self, path, password=None): + if password == None: + password = self.private_key + enc_text = c.get_text(path) + assert enc_text.startswith(self.encrypted_prefix), f'file {path} is not encrypted' + enc_text = enc_text[len(self.encrypted_prefix):] + dec_text = self.decrypt(enc_text, password=password) + if not isinstance(dec_text, str): + dec_text = json.dumps(dec_text) + c.put_text(path, dec_text) + + return {'success': True } + + + + def test_encryption_file(self, filepath='tests/dummy', value='test'): + filepath = self.resolve_path(filepath) + c.put(filepath, value) + decode = c.get(filepath) + self.encrypt_file(filepath) # encrypt file + decode = self.decrypt_file(filepath) # decrypt file + decode = c.get(filepath) + + assert decode == value, f'encryption failed, {decode} != {value}' + c.rm(filepath) + assert not c.exists(filepath), f'file {filepath} not deleted' + return {'success': True, + 'filepath': filepath, + + 'msg': 'test_encryption_file passed'} + + + @classmethod + def test_encryption(cls,value = 10): + key = cls.new_key() + enc = key.encrypt(value) + dec = key.decrypt(enc) + assert dec == value, f'encryption failed, {dec} != {value}' + return {'encrypted':enc, 'decrypted': dec} + + + def test_key_encryption(self, test_key='test.key'): + key = self.add_key(test_key, refresh=True) + og_key = self.get_key(test_key) + r = self.encrypt_key(test_key) + self.decrypt_key(test_key, password=r['password']) + key = self.get_key(test_key) + + assert key.ss58_address == og_key.ss58_address, f'key encryption failed, {key.ss58_address} != {self.ss58_address}' + + return {'success': True, 'msg': 'test_key_encryption passed'} + + + + + def test_key_management(self): + if self.key_exists('test'): + self.rm_key('test') + key1 = self.get_key('test') + assert self.key_exists('test'), f'Key management 
failed, key still exists' + self.mv_key('test', 'test2') + key2 = self.get_key('test2') + assert key1.ss58_address == key2.ss58_address, f'Key management failed, {key1.ss58_address} != {key2.ss58_address}' + assert self.key_exists('test2'), f'Key management failed, key does not exist' + assert not self.key_exists('test'), f'Key management failed, key still exists' + self.mv_key('test2', 'test') + assert self.key_exists('test'), f'Key management failed, key does not exist' + assert not self.key_exists('test2'), f'Key management failed, key still exists' + self.rm_key('test') + assert not self.key_exists('test'), f'Key management failed, key still exists' + return {'success': True, 'msg': 'test_key_management passed'} + + @classmethod + def getmem(cls, key): + + return cls.get_key(key).mnemonic + mem = getmem + def __str__(self): + return f'' + + + def save(self, path=None): + if path == None: + path = self.path + c.print(f'saving key to {path}') + c.put_json(path, self.to_json()) + return {'saved':path} + + def diplicate(self, new_path): + c.print(f'copying key from {self.path} to {new_path}') + c.cp(self.path, new_path) + return {'copied':new_path} + + + + def __repr__(self): + return self.__str__() + + def state_dict(self): + return self.__dict__ + + to_dict = state_dict + @classmethod + def dashboard(cls): + import streamlit as st + self = cls.new_key() + + + keys = self.keys() + + selected_keys = st.multiselect('Keys', keys) + buttons = {} + for key_name in selected_keys: + key = cls.get_key(key_name) + with st.expander('Key Info'): + st.write(key.to_dict()) + + + buttons[key_name] = {} + buttons[key_name]['sign'] = st.button('Sign', key_name) + + st.write(self.keys()) + + @classmethod + def key2type(cls): + keys = cls.keys(object=True) + return {k.path: k.crypto_type_name for k in keys} + @classmethod + def key2mem(cls, search=None): + keys = cls.keys(search, object=True) + key2mem = {k.path: k.mnemonic for k in keys} + return key2mem + + @classmethod + def type2keys(cls): + type2keys = {} + key2type = cls.key2type() + for k,t in key2type.items(): + type2keys[t] = type2keys.get(t, []) + [k] + return type2keys + + @classmethod + def pubkey2multihash(cls, pk:bytes) -> str: + import multihash + hashed_public_key = multihash.encode(pk, code=multihash.SHA2_256) + return hashed_public_key.hex() + + + + @classmethod + def duplicate_keys(cls) -> dict: + + key2address = cls.key2address() + duplicate_keys = {} + + for k,a in key2address.items(): + if a not in duplicate_keys: + duplicate_keys[a] = [] + + duplicate_keys[a] += [k] + + return {k:v for k,v in duplicate_keys.items() if len(v) > 1} + + @classmethod + def clean_all_keys(cls): + key2adress = c.key2address() + for k,a in key2adress.items(): + if c.key_exists(a): + c.print(f'removing {a}', color='red') + c.rm_key(a) + c.print('cleaning', k, a, c.key_exists(a)) + + + @staticmethod + def valid_ss58_address( address: str, valid_ss58_format:int=42 ) -> bool: + """ + Checks if the given address is a valid ss58 address. + + Args: + address(str): The address to check. + + Returns: + True if the address is a valid ss58 address for Bittensor, False otherwise. 
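# Illustrative sketch (not part of the original diff): duplicate_keys above groups
# key names by their ss58 address and keeps only addresses shared by more than one
# name. The sample key2address mapping below is hypothetical.
from typing import Dict, List

def find_duplicate_addresses(key2address: Dict[str, str]) -> Dict[str, List[str]]:
    address2keys: Dict[str, List[str]] = {}
    for key_name, address in key2address.items():
        address2keys.setdefault(address, []).append(key_name)
    # keep only addresses that map to more than one key name
    return {addr: names for addr, names in address2keys.items() if len(names) > 1}

print(find_duplicate_addresses({'alice': '5F..A', 'alice_backup': '5F..A', 'bob': '5G..B'}))
# -> {'5F..A': ['alice', 'alice_backup']}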
+ """ + + try: + return ss58.valid_ss58_address( address, valid_ss58_format=valid_ss58_format ) # Default substrate ss58 format (legacy) + except Exception as e: + return False + + @classmethod + def from_private_key(cls, private_key:str): + return cls(private_key=private_key) + + @classmethod + def valid_ss58_address(cls, address: str ) -> bool: + """ + Checks if the given address is a valid ss58 address. + + Args: + address(str): The address to check. + + Returns: + True if the address is a valid ss58 address for Bittensor, False otherwise. + """ + try: + return ss58.is_valid_ss58_address( address, valid_ss58_format=c.__ss58_format__ ) + except (IndexError): + return False + + @classmethod + def is_valid_ed25519_pubkey(cls, public_key: Union[str, bytes] ) -> bool: + """ + Checks if the given public_key is a valid ed25519 key. + + Args: + public_key(Union[str, bytes]): The public_key to check. + + Returns: + True if the public_key is a valid ed25519 key, False otherwise. + + """ + try: + if isinstance( public_key, str ): + if len(public_key) != 64 and len(public_key) != 66: + raise ValueError( "a public_key should be 64 or 66 characters" ) + elif isinstance( public_key, bytes ): + if len(public_key) != 32: + raise ValueError( "a public_key should be 32 bytes" ) + else: + raise ValueError( "public_key must be a string or bytes" ) + + keypair = Keypair(public_key=public_key, + ss58_format=c.__ss58_format__) + + ss58_addr = keypair.ss58_address + return ss58_addr is not None + + except (ValueError, IndexError): + return False + + @classmethod + def is_valid_address_or_public_key(cls, address: Union[str, bytes] ) -> bool: + """ + Checks if the given address is a valid destination address. + + Args: + address(Union[str, bytes]): The address to check. + + Returns: + True if the address is a valid destination address, False otherwise. 
+ """ + if isinstance( address, str ): + # Check if ed25519 + if address.startswith('0x'): + return cls.is_valid_ed25519_pubkey( address ) + else: + # Assume ss58 address + return cls.valid_ss58_address( address ) + elif isinstance( address, bytes ): + # Check if ed25519 + return cls.is_valid_ed25519_pubkey( address ) + else: + # Invalid address type + return False + + def id_card(self, return_json=True,**kwargs): + return self.sign(str(c.timestamp()), return_json=return_json, **kwargs) + + + def test_str_signing(self): + sig = self.sign('test', return_string=True) + # c.print(''+sig) + assert not self.verify('1'+sig) + assert self.verify(sig) + return {'success':True} + + def ticket(self, key=None, **kwargs): + return c.module('ticket')().create(key=self.key, **kwargs) + + def verify_ticket(self, ticket, **kwargs): + return c.module('ticket')().verify(ticket, key=self.key, **kwargs) + + def test_ticket(self): + ticket = self.ticket() + assert self.verify_ticket(ticket) + return {'success':True, 'msg':'test_ticket passed'} + def to_mnemonic(self, password=None): + from mnemonic import Mnemonic + return Mnemonic('english').to_mnemonic(self.private_key) + + + def ticket_staleness(self, ticket, **kwargs): + + return self.verify(ticket, **kwargs) + + def app(self): + c.module('key.app').app() + + + def test_move_key(self): + self.add_key('testfrom') + assert self.key_exists('testfrom') + og_key = self.get_key('testfrom') + self.mv_key('testfrom', 'testto') + assert self.key_exists('testto') + assert not self.key_exists('testfrom') + new_key = self.get_key('testto') + assert og_key.ss58_address == new_key.ss58_address + self.rm_key('testto') + assert not self.key_exists('testto') + return {'success':True, 'msg':'test_move_key passed', 'key':new_key.ss58_address} + + @staticmethod + def is_ss58(address): + # Check address length + if len(address) != 47: + return False + + # Check prefix + network_prefixes = ['1', '2', '5', '7'] # Add more prefixes as needed + if address[0] not in network_prefixes: + return False + + # Verify checksum + encoded = '0123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' + address_without_checksum = address[:-1] + checksum = address[-1] + address_hash = 0 + for char in address_without_checksum: + address_hash = address_hash * 58 + encoded.index(char) + + # Calculate the expected checksum + expected_checksum = encoded[address_hash % 58] + + # Compare the expected checksum with the provided checksum + if expected_checksum != checksum: + return False + + return True + + +Keypair.run(__name__) + + + diff --git a/commune/miner/add/add.py b/commune/miner/add/add.py new file mode 100644 index 000000000..692d95ad7 --- /dev/null +++ b/commune/miner/add/add.py @@ -0,0 +1,10 @@ +import commune as c +class Miner(c.Module): + description = 'This is the miner module that adds two numbers together' + whitelist = ['forward'] + def forward(self, a=1, b=1): + return a + b + + + + diff --git a/commune/module/tests/test_config.py b/commune/miner/miner.md similarity index 100% rename from commune/module/tests/test_config.py rename to commune/miner/miner.md diff --git a/commune/miner/miner.py b/commune/miner/miner.py new file mode 100644 index 000000000..6d158c7ab --- /dev/null +++ b/commune/miner/miner.py @@ -0,0 +1,59 @@ +import commune as c +from typing import List + +class Miner(c.Module): + description: str + whitelist: List[str] + def __init__(self): + super().__init__() + self.description = 'Eden Miner v1' + self.whitelist = ['forward'] + + def forward(self, a=1, b=1): + 
return a + b + + def launcher_keys(self): + keys = c.keys() + return [k for k in keys if k.startswith('module::')] + + def transfer2launchers(self, amount=10, **kwargs): + destinations = self.launcher_keys() + amounts = [amount] * len(destinations) + return c.transfer_many(amounts=amounts, destinations=destinations, **kwargs) + + @classmethod + def register_many(cls, key2address , + timeout=60, + netuid = 0): + futures = [] + launcher_keys = c.launcher_keys() + future2launcher = {} + future2module = {} + registered_keys = c.m('subspace')().keys(netuid=netuid) + progress = c.tqdm(total=len(key2address)) + while len(key2address) > 0: + modules = list(key2address.keys()) + for i, module in enumerate(modules): + module_key = key2address[module] + if module_key in registered_keys: + c.print(f"Skipping {module} with key {module}") + key2address.pop(module) + progress.update(1) + continue + c.print(f"Registering {module} with key {module}") + launcher_key = launcher_keys[i % len(launcher_keys)] + kwargs=dict(name=module, module_key=module_key, serve=True, key=launcher_key) + future = c.submit(c.register, kwargs=kwargs, timeout=timeout) + future2launcher[future] = launcher_key + future2module[future] = module + + futures = list(future2launcher.keys()) + + for f in c.as_completed(futures, timeout=timeout): + module = future2module.pop(f) + launcher_key = future2launcher.pop(f) + module_key = key2address.pop(module) + c.print(f"Registered {module} module_key:{module_key} launcher_key:{launcher_key}") + r = f.result() + if c.is_error(r): + progress.update(1) \ No newline at end of file diff --git a/commune/module/bt/bt.py b/commune/module/bt/bt.py deleted file mode 100644 index 8434aabde..000000000 --- a/commune/module/bt/bt.py +++ /dev/null @@ -1,74 +0,0 @@ -from concurrent.futures import ThreadPoolExecutor -from queue import Queue -from flask import Flask, request, jsonify -import argparse -import json -import torch -from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline -from typing import List, Dict -import commune as c - - -class RobertMyersProcessor(c.Module): - - num_gpus = torch.cuda.device_count() - executor = ThreadPoolExecutor(max_workers=num_gpus) - num_gpus = torch.cuda.device_count() - processors = [RobertMyersProcessor(device=i) for i in range(num_gpus)] - cnt = 0 - - def __init__(self, device): - self.tokenizer = AutoTokenizer.from_pretrained('robertmyers/ltargon-22b') - self.model = AutoModelForCausalLM.from_pretrained('robertmyers/ltargon-22b', torch_dtype=torch.float16) - self.pipeline = pipeline( - "text-generation", self.model, tokenizer=self.tokenizer, - device=device, max_new_tokens=150, temperature=0.9, top_p=0.9, do_sample=True, pad_token_id=self.tokenizer.eos_token_id - ) - - def apply_system_prompts(self, prompt): - system_prompts = [ - "You are a helpful assistant.", - "Your responses should be as if they're being evaluated by a college professor.", - "Aim for concise, clear, and accurate answers.", - "Avoid colloquial language and slang.", - "Use formal language and proper grammar.", - "Ensure your information is factual and evidence-based.", - "Answer directly to the question without deviating from the topic.", - "Each answer should be structured and well-organized.", - "Provide succinct explanations, avoiding verbosity.", - "Cite relevant examples if necessary, but remain brief." 
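# Illustrative sketch (not part of the original diff): register_many above fans work out
# with c.submit and drains results with c.as_completed, assigning launcher keys round-robin.
# The same shape using only the standard library; register_one and the module names are
# hypothetical stand-ins for the real registration call.
from concurrent.futures import ThreadPoolExecutor, as_completed

def register_one(module: str, launcher_key: str) -> str:
    return f'registered {module} via {launcher_key}'

modules = ['model.a', 'model.b', 'model.c']
launcher_keys = ['module::0', 'module::1']

with ThreadPoolExecutor(max_workers=4) as executor:
    # map each future back to the module it registers, like future2module in the diff
    future2module = {
        executor.submit(register_one, m, launcher_keys[i % len(launcher_keys)]): m
        for i, m in enumerate(modules)
    }
    for future in as_completed(future2module, timeout=60):
        print(future2module[future], '->', future.result())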
- ] - return "\n".join(system_prompts) + "\n" + prompt - - def forward(self, history) -> str: - history_with_prompts = self.apply_system_prompts(history) - resp = self.pipeline(history_with_prompts)[0]['generated_text'].split(':')[-1].replace(str(history_with_prompts), "") - return resp.strip() - - def process_request(history, processor): - response = processor.forward(history) - return response - - - @classmethod - def run_server(cls): - app = Flask(__name__) - - @app.route('/process', methods=['POST']) - def handle_request(): - print("Request Received!") - cls.cnt += 1 - processor = cls.processors[cls.cnt % cls.num_gpus] - try: - history = json.loads(request.data) - future = cls.executor.submit(cls.process_request, history, processor) - response = future.result() - except Exception as e: - print("Error:", e) - response = {"error": str(e)} - return jsonify(response=response) - - parser = argparse.ArgumentParser(description='Run RobertMyers server.') - parser.add_argument('--port', type=int, default=2023, help='Port number to run the server on.') - args = parser.parse_args() - app.run(host='0.0.0.0', port=args.port) \ No newline at end of file diff --git a/commune/module/bt/server.py b/commune/module/bt/server.py deleted file mode 100644 index 8434aabde..000000000 --- a/commune/module/bt/server.py +++ /dev/null @@ -1,74 +0,0 @@ -from concurrent.futures import ThreadPoolExecutor -from queue import Queue -from flask import Flask, request, jsonify -import argparse -import json -import torch -from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline -from typing import List, Dict -import commune as c - - -class RobertMyersProcessor(c.Module): - - num_gpus = torch.cuda.device_count() - executor = ThreadPoolExecutor(max_workers=num_gpus) - num_gpus = torch.cuda.device_count() - processors = [RobertMyersProcessor(device=i) for i in range(num_gpus)] - cnt = 0 - - def __init__(self, device): - self.tokenizer = AutoTokenizer.from_pretrained('robertmyers/ltargon-22b') - self.model = AutoModelForCausalLM.from_pretrained('robertmyers/ltargon-22b', torch_dtype=torch.float16) - self.pipeline = pipeline( - "text-generation", self.model, tokenizer=self.tokenizer, - device=device, max_new_tokens=150, temperature=0.9, top_p=0.9, do_sample=True, pad_token_id=self.tokenizer.eos_token_id - ) - - def apply_system_prompts(self, prompt): - system_prompts = [ - "You are a helpful assistant.", - "Your responses should be as if they're being evaluated by a college professor.", - "Aim for concise, clear, and accurate answers.", - "Avoid colloquial language and slang.", - "Use formal language and proper grammar.", - "Ensure your information is factual and evidence-based.", - "Answer directly to the question without deviating from the topic.", - "Each answer should be structured and well-organized.", - "Provide succinct explanations, avoiding verbosity.", - "Cite relevant examples if necessary, but remain brief." 
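# Illustrative sketch (not part of the original diff): the deleted server above round-robins
# incoming requests across per-GPU processors and runs each one on a thread pool. A minimal
# version of that pattern; echo_processor is a hypothetical stand-in for the model pipeline.
from concurrent.futures import ThreadPoolExecutor
from itertools import count
from flask import Flask, request, jsonify

app = Flask(__name__)
executor = ThreadPoolExecutor(max_workers=2)
request_counter = count()

def echo_processor(worker_id: int, payload) -> dict:
    return {'worker': worker_id, 'echo': payload}

@app.route('/process', methods=['POST'])
def handle():
    worker_id = next(request_counter) % 2   # round-robin over two workers
    future = executor.submit(echo_processor, worker_id, request.get_json())
    return jsonify(future.result())

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=2023)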
- ] - return "\n".join(system_prompts) + "\n" + prompt - - def forward(self, history) -> str: - history_with_prompts = self.apply_system_prompts(history) - resp = self.pipeline(history_with_prompts)[0]['generated_text'].split(':')[-1].replace(str(history_with_prompts), "") - return resp.strip() - - def process_request(history, processor): - response = processor.forward(history) - return response - - - @classmethod - def run_server(cls): - app = Flask(__name__) - - @app.route('/process', methods=['POST']) - def handle_request(): - print("Request Received!") - cls.cnt += 1 - processor = cls.processors[cls.cnt % cls.num_gpus] - try: - history = json.loads(request.data) - future = cls.executor.submit(cls.process_request, history, processor) - response = future.result() - except Exception as e: - print("Error:", e) - response = {"error": str(e)} - return jsonify(response=response) - - parser = argparse.ArgumentParser(description='Run RobertMyers server.') - parser.add_argument('--port', type=int, default=2023, help='Port number to run the server on.') - args = parser.parse_args() - app.run(host='0.0.0.0', port=args.port) \ No newline at end of file diff --git a/commune/module/config/README.md b/commune/module/config/README.md deleted file mode 100644 index d28875377..000000000 --- a/commune/module/config/README.md +++ /dev/null @@ -1,18 +0,0 @@ - -# Configuring Pipelines - - - -## Config Loader - -The config loader has some cool properties that you can use to compose your configurations - -local_copy(path) : -- get a path with respect to root of config -copy(path) -- copy with respect to root of the global config (not your local root if another config is inheriting you) -get_cfg(path) - - pull config giving its path with respect to the root of the commune -ENV! path/to/${ENV_VAR} -- include env variables as well as local_var_dict = {} if put into the config.load function - diff --git a/commune/module/config/__init__.py b/commune/module/config/__init__.py deleted file mode 100644 index 3558f4208..000000000 --- a/commune/module/config/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .config import Config \ No newline at end of file diff --git a/commune/module/config/config.py b/commune/module/config/config.py deleted file mode 100644 index 6c8e22a55..000000000 --- a/commune/module/config/config.py +++ /dev/null @@ -1,317 +0,0 @@ - - -import os -import re -import sys -import yaml -import glob -from munch import Munch -from typing import List, Optional, Union, Any, Dict, Callable - - -class Config ( Munch ): - """ - Implementation of the config class, which manages the config of different bittensor modules. 
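# Illustrative sketch (not part of the original diff): the config loader described here
# resolves values tagged with !ENV (e.g. `port: !ENV ${PORT}`) from the environment at load
# time, mirroring the parse_config utility further down. A standalone version of the resolver
# (note: this registers the constructor globally on SafeLoader, which is fine for a sketch):
import os
import re
import yaml

ENV_PATTERN = re.compile(r'.*?\$\{(\w+)\}.*?')
yaml.SafeLoader.add_implicit_resolver('!ENV', ENV_PATTERN, None)

def env_constructor(loader, node):
    value = loader.construct_scalar(node)
    for var in ENV_PATTERN.findall(value):
        value = value.replace(f'${{{var}}}', os.environ.get(var, var))
    return value

yaml.SafeLoader.add_constructor('!ENV', env_constructor)

os.environ['PORT'] = '50051'
print(yaml.load('port: !ENV ${PORT}', Loader=yaml.SafeLoader))   # {'port': '50051'}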
- """ - root = '/'.join(__file__.split('/')[:-2]) # get root from __file__ == {root}/config/config_module.py - - def __init__(self, config: Union[str, Dict, Munch]=None, *args, **kwargs, ): - """ - Args: - config (Union[str, Dict, Munch]): - - str: path to config file or directory of config files - - """ - - - config = config if config else {} - - if isinstance(config, str) : - - self.config_path = config - config = self.load_config(path=self.config_path) - elif config == None: - config = {} - self.config = config - assert isinstance(self.config, dict) , f'The self.config should be a dictionary but is {type(self.config)}' - - Munch.__init__(self, self.config, *args, **kwargs) - self.recursive_munch(self) - @staticmethod - def recursive_munch(config): - if isinstance(config, dict): - for k,v in config.items(): - if isinstance(v, dict): - config[k] = Config.recursive_munch(v) - - config = Munch(config) - - return config - - - def load(self, path:str ,override:Dict[str, Any]={}): - self.cache = {} - self.config = self.load_config(path=path) - if self.config == None: - return {} - - if isinstance(override, dict) and len(override) > 0: - self.config = self.override_config(config=self.config, override=override) - - self.config = self.resolver_methods(config=self.config) - self.config = self.recursive_munch(self.config) - return self.config - - - def save_config(self, path:str=None, config:str=None): - - config = config if config else self.config - path = path if path else self.config_path - - assert isinstance(config, dict) - - with open(path, 'w') as file: - documents = yaml.dump(config, file) - - return config - - - def get_config(self, input, key_path, local_key_path=[]): - from commune.utils.dict import dict_get - - """ - - :param - input: input string (str) - :return: - Regex Match - - path of config within match - No Regex Match - - None (this means its not pointing to a config path) - """ - - config=input - - if isinstance(config, str): - config_path = re.compile('^(get_config)\((.+)\)').search(input) - # if there are any matches () - if config_path: - config_path = config_path.group(2) - config_keys = None - if ',' in config_path: - assert len(config_path.split(',')) == 2 - config_path ,config_keys = config_path.split(',') - - config = self.load_config(config_path) - config = self.resolve_config(config=config,root_key_path=key_path, local_key_path=key_path) - - if config_keys != None: - - config = dict_get(input_dict=config, keys=config_keys) - - return config - - def set_cache(self, key, value): - self.cache[key] = value - - def get_cache(self, key): - return self.cache[key] - - - def local_copy(self, input, key_path): - """ - - :param - input: input string (str) - :return: - Regex Match - - path of config within match - No Regex Match - - None (this means its not pointing to a config path) - """ - - variable_object = input - if isinstance(input, str): - - variable_path = None - if '::' in input: - assert len(input.split('::')) == 2 - function_name, variable_path = input.split('::') - else: - variable_path = re.compile('^(local_copy)\((.+)\)').search(input) - if variable_path: - variable_path = variable_path.group(2) - - if variable_path: - - # get the object - local_config_key_path = self.cache[list2str(key_path)] - - if local_config_key_path: - local_config = dict_get(input_dict=self.config, keys=self.cache[list2str(key_path)]) - else: - local_config = self.config - variable_object = dict_get(input_dict=local_config, - keys = variable_path) - - return variable_object - - - def 
copy(self, input, key_path): - """ - - :param - input: input string (str) - :return: - Regex Match - - path of config within match - No Regex Match - - None (this means its not pointing to a config path) - """ - - variable_object = input - - - if isinstance(input, str): - - variable_path = re.compile('^(copy)\((.+)\)').search(input) - - if variable_path: - variable_path = variable_path.group(2) - - # get the object - try: - variable_object = dict_get(input_dict=self.config, - keys = variable_path) - except KeyError as e: - raise(e) - - - return variable_object - - - def load_config(self, - path=None, - tag='!ENV'): - - if type(path) in [dict, list]: - return path - assert isinstance(path, str), path - """ - Load a yaml configuration file and resolve any environment variables - The environment variables must have !ENV before them and be in this format - to be parsed: ${VAR_NAME}. - E.g.: - client: - host: !ENV ${HOST} - port: !ENV ${PORT} - app: - log_path: !ENV '/var/${LOG_PATH}' - something_else: !ENV '${AWESOME_ENV_VAR}/var/${A_SECOND_AWESOME_VAR}' - - :param - str path: the path to the yaml file - str tag: the tag to look for - - :return - dict the dict configuration - """ - # pattern for global vars: look for ${word} - - with open(path) as conf_data: - config = yaml.load(conf_data, Loader=yaml.SafeLoader) - - return config - - def __repr__(self) -> str: - return self.__str__() - - def __str__(self) -> str: - return "\n" + yaml.dump(self.toDict()) - - def to_string(self, items) -> str: - """ Get string from items - """ - return "\n" + yaml.dump(items.toDict()) - - def update_with_kwargs( self, kwargs ): - """ Add config to self - """ - for key,val in kwargs.items(): - self[key] = val - - @classmethod - def default_dict_fns(cls): - - from commune.config.utils import dict_fn_local_copy, dict_fn_get_config - - default_dict_fns = { - 'local_copy': dict_fn_local_copy, - 'get_config': dict_fn_get_config - } - return default_dict_fns - - def dict_fn(cls, - fn:Callable, - input: Dict, - context:dict=None, - function_seperator: str='::', - default_dict_fns: Dict=None): - ''' - Apply a function to a dictionary based ont he function seperator: :: - ''' - - from copy import deepcopy - default_dict_fns = cls.default_dict_fns() if default_dict_fns == None else default_dict_fns - recursive_types = [dict, list, set, tuple] - - # get the keys of the input - if type(input) in [dict]: - # get the keys - keys = list(input.keys()) - elif type(input) in [set, list, tuple]: - - # the keys are the index of the list - keys = list(range(len(input))) - - # Convert the set,tuple into a list - if type(input) in [set, tuple]: - input = list(input) - - for key in keys: - if isinstance(input[key], str): - # if the string is sperated by the function and results in 2 strings - assert len(input[key].split(function_seperator)) == 2, \ - f'input value must be a string with the format: function_name::input_arg, but you have {input[key]}' - trigger_function = function_seperator in input[key] - - if trigger_function: - function_key, input_arg = input[key].split(function_seperator) - input[key] = default_dict_fns[function_key](input_arg, context=context) - - if type(input[key]) in [dict, list, tuple, set]: - # functions do not apply over dictionaries - input[key] = dict_fn(fn=fn, - input=input[key], - context=context, - function_seperator=function_seperator, - default_dict_fns=default_dict_fns) - - - return input - - - def override(self, override={}, config=None): - from commune.utils.dict import dict_put - """ - - """ - if 
config == None: - config = self.config - - for k,v in override.items(): - dict_put(input_dict=config,keys=k, value=v) - - return config - - diff --git a/commune/module/config/utils.py b/commune/module/config/utils.py deleted file mode 100644 index 653e26ece..000000000 --- a/commune/module/config/utils.py +++ /dev/null @@ -1,81 +0,0 @@ -import os -import sys -import re -import yaml -import argparse - - -from copy import deepcopy - -def parse_config(path=None, tag='!ENV'): - """ - Load a yaml configuration file and resolve any environment variables - The environment variables must have !ENV before them and be in this format - to be parsed: ${VAR_NAME}. - E.g.: - client: - host: !ENV ${HOST} - port: !ENV ${PORT} - app: - log_path: !ENV '/var/${LOG_PATH}' - something_else: !ENV '${AWESOME_ENV_VAR}/var/${A_SECOND_AWESOME_VAR}' - :param - str path: the path to the yaml file - str tag: the tag to look for - :return - dict the dict configuration - """ - # pattern for global vars: look for ${word} - pattern = re.compile('.*?\${(\w+)}.*?') - loader = yaml.SafeLoader - - # the tag will be used to mark where to start searching for the pattern - # e.g. somekey: !ENV somestring${MYENVVAR}blah blah blah - loader.add_implicit_resolver(tag, pattern, None) - - def constructor_env_variables(loader, node): - """ - Extracts the environment variable from the node's value - :param yaml.Loader loader: the yaml loader - :param node: the current node in the yaml - :return: the parsed string that contains the value of the environment - variable - """ - value = loader.construct_scalar(node) - match = pattern.findall(value) # to find all env variables in line - if match: - full_value = value - for g in match: - full_value = full_value.replace( - f'${{{g}}}', os.environ.get(g, g) - ) - return full_value - return value - - loader.add_constructor(tag, constructor_env_variables) - with open(path) as conf_data: - return yaml.load(conf_data, Loader=loader) - - -def dict_fn_local_copy(input,context={}): - keys = input.split('.') - dict_get(input_dict=context, keys=keys) - - -def dict_fn_get_config(input,context={}): - keys = input.split('.') - dict_get(input_dict=context, keys=keys) - - - - -def dict_fn_ray_get(input:str, context={}): - - if len(input.split('::')) == 1: - input = input - elif len(input.split('::')) == 2: - namespace, actor_name = input.split('::') - else: - raise NotImplemented(input) - - ray.get_actor() \ No newline at end of file diff --git a/commune/module/module.py b/commune/module/module.py index 3899f8f8c..c8cf2239f 100755 --- a/commune/module/module.py +++ b/commune/module/module.py @@ -1,8 +1,7 @@ - - -import inspect -import numpy as np import os +import inspect +import concurrent +import threading from copy import deepcopy from typing import Optional, Union, Dict, List, Any, Tuple, Callable from munch import Munch @@ -13,28 +12,38 @@ import argparse import asyncio from typing import Union, Dict, Optional, Any, List, Tuple -import warnings -warnings.filterwarnings("ignore", category=DeprecationWarning) +import nest_asyncio +import random + +nest_asyncio.apply() + # AGI BEGINS class c: + whitelist = ['info', + 'schema', + 'server_name', + 'is_admin', + 'namespace', + 'whitelist', + 'blacklist', + 'fns'] # whitelist of helper functions to load + cost = 1 description = """This is a module""" - base_module = 'module' - encrypted_prefix = 'ENCRYPTED' - homepath = os.path.expanduser('~') + base_module = 'module' # the base module + encrypted_prefix = 'ENCRYPTED' # the prefix for encrypted values + giturl = 
git_url = 'https://github.com/commune-ai/commune.git' # tge gutg + homepath = home_path = os.path.expanduser('~') # the home path root_module_class = 'c' # WE REPLACE THIS THIS Module at the end, kindof odd, i know, ill fix it fam, chill out dawg, i didnt sleep with your girl default_port_range = [50050, 50150] # the port range between 50050 and 50150 - default_ip = '0.0.0.0' + default_ip = local_ip = loopback = '0.0.0.0' address = '0.0.0.0:8888' # the address of the server (default) root_path = root = os.path.dirname(os.path.dirname(__file__)) # the path to the root of the library - libpath = os.path.dirname(root_path) # the path to the library - datapath = os.path.join(libpath, 'data') # the path to the data folder - modules_path = os.path.join(root_path, 'modules') # the path to the modules folder + libpath = lib_path = os.path.dirname(root_path) # the path to the library + libname = lib_name = lib = root_path.split('/')[-1] # the name of the library + datapath = os.path.join(root_path, 'data') # the path to the data folder + modules_path = os.path.join(lib_path, 'modules') # the path to the modules folder repo_path = os.path.dirname(root_path) # the path to the repo - library_name = libname = lib = root_dir = root_path.split('/')[-1] # the name of the library - pwd = os.getenv('PWD') # the current working directory from the process starts console = Console() # the consolve - helper_whitelist = ['info', 'schema','server_name', 'is_admin'] # whitelist of helper functions to load - whitelist = [] # whitelist of functions to load blacklist = [] # blacklist of functions to not to access for outside use server_mode = 'http' # http, grpc, ws (websocket) default_network = 'local' # local, subnet @@ -42,7 +51,6 @@ class c: home = os.path.expanduser('~') # the home directory __ss58_format__ = 42 # the ss58 format for the substrate address - def __init__(self, config:Dict=None, **kwargs): self.set_config(config=config,kwargs=kwargs) @@ -50,6 +58,84 @@ def __init__(self, config:Dict=None, **kwargs): def init(cls, *args, **kwargs): return cls(*args, **kwargs) + default_tag = 'base' + @property + def tag(self): + tag = None + if not hasattr(self, 'config') or not isinstance(self.config, dict): + self.config = c.dict2munch({}) + if 'tag' in self.config: + tag = self.config['tag'] + return tag + + @tag.setter + def tag(self, value): + if not hasattr(self, 'config') or not isinstance(self.config, dict): + self.config = c.dict2munch({}) + self.config['tag'] = value + return value + @classmethod + def pwd(cls): + pwd = os.getenv('PWD') # the current wor king directory from the process starts + return pwd + + def set_config(self, + config:Optional[Union[str, dict]]=None, + kwargs:dict=None, + to_munch: bool = True, + add_attributes: bool = False, + save_config:bool = False) -> Munch: + ''' + Set the config as well as its local params + ''' + kwargs = kwargs if kwargs != None else {} + + # in case they passed in a locals() dict, we want to resolve the kwargs and avoid ambiguous args + kwargs = c.locals2kwargs(kwargs) + + if 'config' in kwargs: + config = kwargs.pop('config') + + # get the config + config = self.config(config=config,kwargs=kwargs, to_munch=to_munch) + + # add the config attributes to the class (via munch -> dict -> class ) + if add_attributes: + self.__dict__.update(self.munch2dict(config)) + + self.config = config + self.kwargs = kwargs + + if save_config: + self.save_config(config=config) + + return self.config + + + @property + def key(self): + if not hasattr(self, '_key'): + self._key = 
c.get_key(self.server_name, create_if_not_exists=True) + return self._key + + @key.setter + def key(self, key: 'Key'): + self._key = c.get_key(key, create_if_not_exists=True) + return self._key + + @classmethod + def call(cls, *args, **kwargs) -> None: + return c.module('client').call( *args, **kwargs) + + @classmethod + async def async_call(cls, *args,**kwargs): + return c.call(*args, **kwargs) + + + @classmethod + def call_search(cls,*args, **kwargs) -> None: + return c.m('client').call_search(*args, **kwargs) + def getattr(self, k:str)-> Any: return getattr(self, k) @@ -61,9 +147,10 @@ def getclassattr(cls, k:str)-> Any: def module_file(cls) -> str: # get the file of the module return inspect.getfile(cls) + @classmethod - def module_dirpath(self, simple:bool=False) -> str: - return os.path.dirname(self.module_file(simple=simple)) + def module_dirpath(self) -> str: + return os.path.dirname(self.module_file()) @classmethod def __module_dir__(cls) -> str : @@ -72,98 +159,78 @@ def __module_dir__(cls) -> str : @classmethod def get_module_path(cls, obj=None, simple:bool=False) -> str: - import inspect # odd case where the module is a module in streamlit obj = cls.resolve_module(obj) - try: - module_path = inspect.getfile(obj) - except Exception as e: - if 'source code not available' in str(e): - return cls.class_name() - else: - raise e - + module_path = inspect.getfile(obj) # convert into simple if simple: - return cls.path2simple(path=module_path) + module_path = cls.path2simple(module_path) return module_path + @classmethod - def get_module_dirpath(cls, obj=None, simple:bool=False) -> str: - return os.path.dirname(c.get_module_path(obj=obj, simple=simple)) - get_module_dir = get_module_dirpath - - @classmethod - def filepath(cls) -> str: + def filepath(cls, obj=None) -> str: ''' removes the PWD with respect to where module.py is located ''' - return cls.get_module_path(simple=False) + obj = cls.resolve_module(obj) + module_path = inspect.getfile(obj) + return module_path + + @classmethod + def gitbranch(cls) -> str: + return c.cmd('git branch').split('\n')[0].replace('* ', '') + + @classmethod + def gitpath(cls ,root='https://github.com/commune-ai/commune/tree/'): + branch = cls.gitbranch() + root = root + branch + '/' + filepath = cls.filepath().replace(c.repo_path + '/', '') + return root + filepath + + pythonpath = pypath = filepath @classmethod - def configpath(cls) -> str: + def config_path(cls) -> str: ''' removes the PWD with respect to where module.py is located ''' - return cls.get_module_config_path() - cfgpath = config_path = configpath + return cls.config_path() + cfgpath = config_path = config_path - @classmethod def dirpath(cls) -> str: ''' removes the PWD with respect to where module.py is located ''' return os.path.dirname(cls.filepath()) - + folderpath = dirname = dirpath @classmethod def dlogs(cls, *args, **kwargs): + ''' + logs of the docker contianer + ''' return c.module('docker').logs(*args, **kwargs) @classmethod def images(cls, *args, **kwargs): + """ + images + """ return c.module('docker').images(*args, **kwargs) - - - @classmethod - def __local_file__(cls) -> str: - ''' - removes the PWD with respect to where module.py is located - ''' - return cls.get_module_path(simple=False).replace(cls.repo_path+'/', '') - - @classmethod - def __simple_file__(cls) -> str: - ''' - The simple representation of a module path with respect to the module.py - home/commune/module.py would assume the module_path would be home/commune/ - - Using this we convert the full path of the 
module into a simple path for more - human readable strings. We do the following - - 1. Remove the MODULE_PATH and assume the module represents the directory - 2. replace the "/" with "." - - - Examples: - commune/dataset/text/dataset.py -> dataset.text - commune/model/transformer/dataset.py -> model.transformer - - ''' - file = cls.get_module_path(simple=True) - - return file - @classmethod def module_path(cls, simple:bool=True) -> str: # get the module path - path = cls.get_module_path(simple=simple) - path = path.replace('modules.', '') - return path + obj = cls.resolve_module(cls) + module_path = inspect.getfile(obj) + # convert into simple + if simple: + module_path = cls.path2simple(module_path) + return module_path path = name = module_name = module_path @@ -174,64 +241,53 @@ def module_class(cls) -> str: def class_name(cls, obj= None) -> str: obj = obj if obj != None else cls return obj.__name__ + classname = class_name + @classmethod def get_class_name(cls, obj = None) -> str: obj = obj if obj != None else cls if not cls.is_class(obj): obj = type(obj) - return obj.__name__ - - @property - def module_tag(self) -> str: - ''' - The tag of the module for many flavors of the module to avoid name conflicts - (TODO: Should we call this flavor?) - - ''' - if not hasattr(self, '_module_tag'): - self.__dict__['_module_tag'] = None - return self._module_tag - - - @module_tag.setter - def module_tag(self, value): - # set the module tag - self._module_tag = value - return self._module_tag @classmethod def minimal_config(cls) -> Dict: ''' The miminal config a module can be - ''' - minimal_config = { - 'module': cls.__name__ - } + minimal_config = {'module': cls.__name__} return minimal_config @classmethod - def get_module_config_path(cls) -> str: + def config_path(cls) -> str: return cls.get_module_path(simple=False).replace('.py', '.yaml') - - @classmethod - def dict2munch(cls, x:Dict) -> Munch: + + @classmethod + def dict2munch(cls, x:dict, recursive:bool=True)-> Munch: ''' - Converts a dict to a munch + Turn dictionary into Munch ''' - from commune.utils.dict import dict2munch - return dict2munch(x) - + if isinstance(x, dict): + for k,v in x.items(): + if isinstance(v, dict) and recursive: + x[k] = c.dict2munch(v) + x = Munch(x) + return x + @classmethod - def munch2dict(cls, x:'Munch') -> Dict: + def munch2dict(cls, x:Munch, recursive:bool=True)-> dict: ''' - Converts a munch to a dict + Turn munch object into dictionary ''' - from commune.utils.dict import munch2dict - return munch2dict(x) - + if isinstance(x, Munch): + x = dict(x) + for k,v in x.items(): + if isinstance(v, Munch) and recursive: + x[k] = c.munch2dict(v) + + return x + @classmethod def munch(cls, x:Dict) -> Munch: ''' @@ -240,16 +296,22 @@ def munch(cls, x:Dict) -> Munch: return cls.dict2munch(x) @classmethod - def load_yaml(cls, path:str=None, root:bool = False) -> Dict: + def load_yaml(cls, path:str=None, default={}, **kwargs) -> Dict: '''f Loads a yaml file ''' - path = cls.resolve_path(path, root=root) - - from commune.utils.dict import load_yaml - config = load_yaml(path) - return config + import yaml + path = cls.resolve_path(path) + + try: + with open(path, 'r') as file: + data = yaml.safe_load(file) + except: + data = default + return data + + get_yaml = load_yaml @classmethod @@ -265,16 +327,24 @@ def fn2code(cls, search=None, module=None)-> Dict[str, str]: c.print(f'Error: {e}', color='red') return fn_code_map - - @classmethod - def fn_code(cls,fn:str, detail:bool=False, ) -> str: + def fn_code(cls,fn:str, + 
detail:bool=False, + seperator: str = '/' + ) -> str: ''' Returns the code of a function ''' + if isinstance(fn, str): + if seperator in fn: + module_path, fn = fn.split(seperator) + module = c.module(module_path) + fn = getattr(module, fn) + else: + fn = getattr(cls, fn) - code_text = inspect.getsource(getattr(cls, fn)) + code_text = inspect.getsource(fn) text_lines = code_text.split('\n') if 'classmethod' in text_lines[0] or 'staticmethod' in text_lines[0] or '@' in text_lines[0]: text_lines.pop(0) @@ -290,19 +360,20 @@ def fn_code(cls,fn:str, detail:bool=False, ) -> str: } return fn_code - + + fncode = fn_code @classmethod def sandbox(cls): - - c.cmd(f'python3 {c.libpath}/sandbox.py') + c.cmd(f'python3 {c.libpath}/sandbox.py', verbose=True) + return sand = sandbox @classmethod - def save_yaml(cls, path:str, data: dict, root:bool = False) -> Dict: + def save_yaml(cls, path:str, data: dict) -> Dict: ''' Loads a yaml file ''' - path = cls.resolve_path(path, root=root) + path = cls.resolve_path(path) from commune.utils.dict import save_yaml if isinstance(data, Munch): @@ -312,35 +383,14 @@ def save_yaml(cls, path:str, data: dict, root:bool = False) -> Dict: put_yaml = save_yaml - def merge_config(self, config:Dict, overrite_keys:bool = False) -> Dict: - ''' - Merges the config with the current config - ''' - if hasattr(config, 'to_dict'): - config = config.to_dict() - - elif isinstance(config, Munch): - config = self.munch2dict(config) - - # merge the model config with the config - - default_config = self.munch2dict(self.config) - for k,v in config.items(): - if not overrite_keys: - assert k not in default_config, f'config key {k} not found in config' - default_config[k] = config[k] - self.config = self.munch(default_config) - return self.config - @classmethod def config_path(cls) -> str: path = cls.module_file().replace('.py', '.yaml') return path - @classmethod - def load_config(cls, path:str=None, to_munch:bool = False, root:bool = False) -> Union[Munch, Dict]: + def load_config(cls, path:str=None, to_munch:bool = False) -> Union[Munch, Dict]: ''' Args: path: The path to the config file @@ -350,7 +400,7 @@ def load_config(cls, path:str=None, to_munch:bool = False, root:bool = False) -> if path == None: path = cls.config_path() else: - module_tree = cls.module_tree() + module_tree = c.tree() path = module_tree[path].replace('.py', '.yaml') config = cls.load_yaml(path) @@ -368,35 +418,23 @@ def load_config(cls, path:str=None, to_munch:bool = False, root:bool = False) -> default_config = load_config + encrypted_prefix = 'ENCRYPTED' @classmethod - def encrypt_path(cls, path:str, key=None, prefix='ENCRYPTED') -> str: - ''' - Encrypts the path - ''' - path = cls.resolve_path(path) - text = c.get_text(path) - encrypted_text = prefix + c.encrypt(text, key=key) - c.put_text(path, encrypted_text) - - return {'success': True, 'msg': f'encrypted {path}'} + def encrypt_file(cls, path:str, password=None, key=None,) -> str: + key = c.get_key(key) + text = cls.get_text(path) + r = key.encrypt(text, password=password) + return cls.put_text(path, r) - @classmethod - def decrypt_path(cls, path:str, key=None, prefix='ENCRYPTED') -> str: - ''' - Encrypts the path - ''' - path = cls.resolve_path(path) - text = c.get_text(path) - assert text.startswith(prefix), f'path {path} is not encrypted' - text = text[len(prefix):] - encrypted_text = c.decreypt(text, key=key) - c.put_text(path, encrypted_text) + def decrypt_file(cls, path:str, key=None, password=None, **kwargs) -> str: + key = c.get_key(key) + text = 
cls.get_text(path) + r = key.decrypt(text, password=password,key=key, **kwargs) + return cls.put_text(path, r) - return {'success': True, 'msg': f'encrypted {path}'} - - def is_encrypted_path(self, path:str, prefix='ENCRYPTED') -> bool: + def is_encrypted_path(self, path:str, prefix=encrypted_prefix) -> bool: ''' Encrypts the path ''' @@ -404,32 +442,37 @@ def is_encrypted_path(self, path:str, prefix='ENCRYPTED') -> bool: text = c.get_text(path) return text.startswith(prefix) - - @classmethod def put(cls, - k, - v, + k: str, + v: Any, mode: bool = 'json', - key : str = None, - encrypt: bool = False, - ): + encrypt: bool = False, + verbose: bool = False, + password: str = None, **kwargs) -> Any: ''' Puts a value in the config ''' + encrypt = encrypt or password != None - if encrypt: - data = c.encrypt(v, key=key, return_dict=True) + if encrypt or password != None: + v = c.encrypt(v, password=password) + + if not c.jsonable(v): + v = c.serialize(v) data = {'data': v, 'encrypted': encrypt, 'timestamp': c.timestamp()} + # default json getattr(cls,f'put_{mode}')(k, data) + + if verbose: + c.print(f'put {k} = {v}') + + data_size = c.sizeof(v) - return data - + return {'k': k, 'data_size': data_size, 'encrypted': encrypt, 'timestamp': c.timestamp()} - - @classmethod def get(cls, k:str, @@ -438,6 +481,9 @@ def get(cls, max_age:str = None, cache :bool = False, full :bool = False, + key: 'Key' = None, + update :bool = False, + password : str = None, **kwargs) -> Any: ''' @@ -448,23 +494,25 @@ def get(cls, if cache: if k in cls.cache: return cls.cache[k] - - verbose = kwargs.get('verbose', False) data = getattr(cls, f'get_{mode}')(k,default=default, **kwargs) - if data == None: - data = default - encrypted = c.is_encrypted(data) - if encrypted: - data = cls.decrypt(data, key=key) + + + if password != None: + assert data['encrypted'] , f'{k} is not encrypted' + data['data'] = c.decrypt(data['data'], password=password, key=key) + + data = data or default + if isinstance(data, dict): + if update: + max_age = 0 if max_age != None: timestamp = data.get('timestamp', None) if timestamp != None: - age = c.get_age(timestamp) - if age > max_age: - if verbose: - c.print(f'{key} is too old, age: {int(age)} > {max_age}', color='red') + age = int(c.time() - timestamp) + if age > max_age: # if the age is greater than the max age + c.print(f'{k} is too old ({age} > {max_age})', color='red') return default else: data = default @@ -479,18 +527,7 @@ def get(cls, cls.cache[k] = data return data - - - @staticmethod - def get_age(timestamp:int=0): - return c.time() - timestamp - - @staticmethod - def too_old(self, timestamp:int, max_age:int): - return self.get_age(timestamp) > max_age - - @classmethod def putc(cls, k, v, password=None) -> Munch: ''' @@ -504,8 +541,7 @@ def putc(cls, k, v, password=None) -> Munch: cls.save_config(config=config) return {'success': True, 'msg': f'config({k} = {v})'} - - + setc = putc @classmethod def rmc(cls, k, password=None) -> Munch: ''' @@ -516,14 +552,8 @@ def rmc(cls, k, password=None) -> Munch: cls.save_config(config=config) delc = rmc - setc = putc - - + - @classmethod - def frontend(cls): - return c.compose('frontend') - @classmethod def popc(cls, key:str): config = cls.config() @@ -535,18 +565,15 @@ def hasc(cls, key:str): config = cls.config() return key in config - @classmethod - def keysc(cls): - config = cls.config() - return list(config.keys()) - @classmethod def getc(cls, key, default= None, password=None) -> Any: ''' Saves the config to a yaml file ''' + + config = 
cls.config() + data = cls.dict_get(config, key, default) - data = cls.dict_get(cls.config(), key, default) if c.is_encrypted(data): if password == None: return data @@ -562,7 +589,7 @@ def save_config(cls, config:Union[Munch, Dict]= None, path:str=None) -> Munch: Saves the config to a yaml file ''' if config == None: - config = cls.get_config() + config = cls.config() path = path if path else cls.config_path() @@ -573,6 +600,8 @@ def save_config(cls, config:Union[Munch, Dict]= None, path:str=None) -> Munch: else: raise ValueError(f'config must be a dict or munch, not {type(config)}') + assert isinstance(config, dict), f'config must be a dict, not {config}' + config = cls.save_yaml(data=config , path=path) return config @@ -584,111 +613,85 @@ def config_exists(self, path:str=None) -> bool: ''' path = path if path else self.config_path() return self.path_exists(path) + @classmethod - def get_config(cls, + def config(cls, config:dict = None, kwargs:dict=None, - module = None, to_munch:bool = True) -> Munch: ''' Set the config as well as its local params ''' - if not cls.has_config(): - config = {} + # THIS LOADS A YAML IF IT EXIST, OR IT USES THE INIT KWARGS IF THERE IS NO YAML + if cls.has_config(): + default_config = cls.load_config(to_munch=False) + else: + default_config = cls.init_kwargs() + + if config == None: + config = default_config + elif isinstance(config, str): + config = cls.load_config(path=config) + assert isinstance(config, dict), f'config must be a dict, not {type(config)}' + + if isinstance(config, dict): + config = {**default_config, **config} else: - if config == None: - config = cls.load_config() - elif isinstance(config, str): + raise ValueError(f'config must be a dict, str or None, not {type(config)}') - config = cls.load_config(path=config) - assert isinstance(config, dict), f'config must be a dict, not {type(config)}' - elif isinstance(config, dict): - default_config = cls.load_config() - config = {**default_config, **config} - else: - raise ValueError(f'config must be a dict, str or None, not {type(config)}') - - assert isinstance(config, dict), f'config must be a dict, not {config}' - # SET THE CONFIG FROM THE KWARGS, FOR NESTED FIELDS USE THE DOT NOTATION, # for example model.name=bert is the same as config[model][name]=bert + # merge kwargs with itself (CAUTION THIS DOES NOT WORK IF KWARGS WAS MEANT TO BE A VARIABLE LOL) - kwargs = kwargs if kwargs != None else {} - kwargs.update(kwargs.pop('kwargs', {})) - for k,v in kwargs.items(): - cls.dict_put(config,k,v ) - + config = c.locals2kwargs(config) + + if kwargs != None: + kwargs = c.locals2kwargs(kwargs) + for k,v in kwargs.items(): + cls.dict_put(config,k,v ) # add the config after in case the config has a config attribute lol if to_munch: config = cls.dict2munch(config) - + + return config + + cfg = get_config = config - config = get_config + @classmethod + def flatten_dict(cls, x = {'a': {'b': 1, 'c': {'d': 2, 'e': 3}, 'f': 4}}): + from commune.utils.dict import deep2flat + return deep2flat(x) @classmethod - def cfg(cls, *args, **kwargs): - return cls.get_config(*args, **kwargs) - - - def set_config(self, - config:Optional[Union[str, dict]]=None, - kwargs:dict=None, - to_munch: bool = True, - add_attributes: bool = False, - save_config:bool = False) -> Munch: - ''' - Set the config as well as its local params - ''' - kwargs = kwargs if kwargs != None else {} - - # in case they passed in a locals() dict, we want to resolve the kwargs and avoid ambiguous args - kwargs = c.locals2kwargs(kwargs) - - if 'config' in 
kwargs: - config = kwargs.pop('config') - - # get the config - config = self.get_config(config=config,kwargs=kwargs, to_munch=to_munch) - - - # add the config attributes to the class (via munch -> dict -> class ) - if add_attributes: - self.__dict__.update(self.munch2dict(config)) - self.config = config - self.kwargs = kwargs - - if save_config: - self.save_config(config=config) - - - return self.config - - @classmethod - def flatten_dict(cls, x = {'a': {'b': 1, 'c': {'d': 2, 'e': 3}, 'f': 4}}): - from commune.utils.dict import deep2flat - return deep2flat(x) - - @classmethod - def start_node(cls, *args, **kwargs): - return c.module('subspace').start_node(*args, **kwargs) + def start_node(cls, *args, **kwargs): + return c.module('subspace.chain').start_node(*args, **kwargs) + + @classmethod + def chains(cls, *args, **kwargs): + return c.module('subspace.chain')().chains(*args, **kwargs) + networks = chains @classmethod def start_telemetry(cls, *args, **kwargs): - return c.module('subspace').start_telemetry(*args, **kwargs) + return c.module('subspace.chain').start_telemetry(*args, **kwargs) @classmethod def start_local_node(cls, *args, **kwargs): - return c.module('subspace').start_local_node(*args, **kwargs) + return c.module('subspace.chain').start_local_node(*args, **kwargs) @classmethod + def start_public_nodes(cls, *args, **kwargs): + return c.module('subspace.chain').start_public_nodes(*args, **kwargs) + + @classmethod def start_chain(cls, *args, **kwargs): - c.module('subspace').start_chain(*args, **kwargs) - return {'success': True, 'msg': 'started chain'} + return c.module('subspace.chain').start_chain(*args, **kwargs) + @classmethod def kill_chain(cls, *args, **kwargs): - c.module('subspace').kill_chain(*args, **kwargs) + c.module('subspace.chain').kill_chain(*args, **kwargs) return {'success': True, 'msg': 'killed chain'} def seconds_per_epoch(self, *args, **kwargs): return c.module('subspace')().seconds_per_epoch(*args, **kwargs) @@ -707,7 +710,7 @@ def getmem(self, *args, **kwargs): @classmethod def mv_key(cls, *args, **kwargs): return c.module('key').mv_key(*args, **kwargs) - + @classmethod def mems(cls, *args, **kwargs): return c.module('key').mems(*args, **kwargs) @@ -716,7 +719,18 @@ def mems(cls, *args, **kwargs): @classmethod def switch_key(cls, *args, **kwargs): return c.module('key').switch_key(*args, **kwargs) + + @classmethod + def module_info(cls, *args, **kwargs): + return c.module('subspace')().module_info(*args, **kwargs) + + minfo = module_info + @classmethod + def pwd2key(cls, *args, **kwargs): + return c.module('key').pwd2key(*args, **kwargs) + + password2key = pwd2key # KEY LAND @classmethod def rename_key(cls, *args, **kwargs): @@ -773,29 +787,19 @@ def add_args( cls, config: dict , prefix: str = None , parser: argparse.Argument def gradio(self, *args, **kwargs): return c.module('gradio')(*args, **kwargs) - @classmethod - def st(cls, module = None, fn='dashboard', port=8501): - module = c.module(module) - module_filepath = module.filepath() - c.print(f'Running {module_filepath}', color='green') - # add port to the command - port = c.get_port(port) - cmd = f'streamlit run {module_filepath}' - if port != None: - cmd += f' --server.port {port}' - cmd+= f' -- --fn {fn}' - c.cmd(cmd, verbose=True) - @staticmethod - def stside(fn): - import streamlit as st - - def wrapper(*args, **kwargs): - with st.sidebar: - return fn(*args, **kwargs) - - return wrapper - + @classmethod + def start_app(cls, + module:str = 'module', + fn='app', + port=8501, + public:bool = False, 
+ remote:bool = False): + kwargs = c.locals2kwargs(locals()) + return c.module('app')().start(**kwargs) + + app = start_app + @staticmethod def st_load_css(*args, **kwargs): c.module('streamlit').load_css(*args, **kwargs) @@ -803,111 +807,53 @@ def st_load_css(*args, **kwargs): @classmethod def rcmd(cls, *args, **kwargs): return c.module('remote').cmd(*args, **kwargs) - - @classmethod - def cmd(cls, - command:Union[str, list], - verbose:bool = True, - env:Dict[str, str] = {}, - output_text:bool = False, - sudo:bool = False, - password: bool = None, - color: str = 'white', - bash : bool = False, - **kwargs) -> 'subprocess.Popen': - ''' - Runs a command in the shell. - - ''' - if output_text : - verbose = False - if isinstance(command, list): - kwargs = c.locals2kwargs(locals()) - for idx,cmd in enumerate(command): - assert isinstance(cmd, str), f'command must be a string, not {type(cmd)}' - kwargs['command'] = cmd - response = c.cmd(**kwargs) - return response - - import subprocess - import shlex - import time - import signal - - def kill_process(process): - import signal - process.stdout.close() - process.send_signal(signal.SIGINT) - process.wait() - # sys.exit(0) - - if password != None: - sudo = True - - if sudo: - command = f'sudo {command}' - - - if bash: - command = f'bash -c "{command}"' - process = subprocess.Popen(shlex.split(command), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - env={**os.environ, **env}, **kwargs) + def rpwd(cls, *args, **kwargs): + return c.module('remote')().pwd(*args, **kwargs) + pw = rpwd + @classmethod + def cmd(cls, *args,**kwargs): + return c.module('os').cmd( *args, **kwargs) + run_command = shell = cmd - - new_line = b'' - stdout_text = '' - line_count_idx = 0 - line_delay_period = 0 - last_time_line_printed = time.time() - try: - - for ch in iter(lambda: process.stdout.read(1), b""): - - - if ch == b'\n': - stdout_text += (new_line + ch).decode() - line_count_idx += 1 - if verbose: - c.print(new_line.decode(), color='cyan') - new_line = b'' - continue - - new_line += ch - - - except KeyboardInterrupt: - kill_process(process) - - return stdout_text + @classmethod + def sys_path(cls, *args, **kwargs): + return c.module('os').sys_path(*args, **kwargs) - run_command = shell = cmd @classmethod def import_module(cls, import_path:str) -> 'Object': from importlib import import_module - - return import_module(import_path) - + try: + return import_module(import_path) + except Exception as e: + import sys + sys.path.append(c.pwd()) + return import_module(import_path) + + def can_import_module(self, module:str) -> bool: + ''' + Returns true if the module is valid + ''' + try: + c.import_module(module) + return True + except: + return False @classmethod def import_object(cls, key:str, verbose: bool = False)-> Any: - ''' - Import an object from a string with the format of {module_path}.{object} Examples: import_object("torch.nn"): imports nn from torch - ''' - from importlib import import_module module = '.'.join(key.split('.')[:-1]) object_name = key.split('.')[-1] if verbose: c.print(f'Importing {object_name} from {module}') - obj = getattr(import_module(module), object_name) + + obj = getattr(c.import_module(module), object_name) return obj imp = get_object = importobj = import_object @@ -915,27 +861,52 @@ def import_object(cls, key:str, verbose: bool = False)-> Any: @classmethod - def module_exists(cls, module:str) -> bool: + def module_exists(cls, module:str, **kwargs) -> bool: ''' Returns true if the module exists ''' - return module in c.modules() + 
return module in c.modules(**kwargs) @classmethod - def modules(cls, search=None)-> List[str]: - ''' - List of module paths with respect to module.py file - - Assumes the module root directory is the directory containing module.py - ''' - module_list = list(cls.module_tree().keys()) - if search != None: - module_list = [m for m in module_list if search in m] - + def modules(cls, search=None, mode='local', tree='commune', **kwargs)-> List[str]: + if any([str(k) in ['subspace', 's'] for k in [mode, search]]): + module_list = c.module('subspace')().modules(search=search, **kwargs) + else: + module_list = list(c.tree(search=search, tree=tree, **kwargs).keys()) + if search != None: + module_list = [m for m in module_list if search in m] return module_list + + def mean(self, x:list=[0,1,2,3,4,5,6,7,8,9,10]): + if not isinstance(x, list): + x = list(x) + return sum(x) / len(x) + + def median(self, x:list=[0,1,2,3,4,5,6,7,8,9,10]): + if not isinstance(x, list): + x = list(x) + x = sorted(x) + n = len(x) + if n % 2 == 0: + return (x[n//2] + x[n//2 - 1]) / 2 + else: + return x[n//2] + + def stdev(self, x:list= [0,1,2,3,4,5,6,7,8,9,10], p=2): + if not isinstance(x, list): + x = list(x) + mean = c.mean(x) + return (sum([(i - mean)**p for i in x]) / len(x))**(1/p) + + # def test_stats(self, x:list):c + # mean = self.mean(x) + # stdev = self.stdev(x) + # return {'mean': mean, 'stdev': stdev} + + @classmethod def port_used(cls, port: int, ip: str = '0.0.0.0', timeout: int = 1): import socket @@ -946,6 +917,7 @@ def port_used(cls, port: int, ip: str = '0.0.0.0', timeout: int = 1): # Try to connect to the specified IP and port try: + port=int(port) sock.connect((ip, port)) return True except socket.error: @@ -991,35 +963,82 @@ async def check_port(port, ip): get_used_ports = used_ports - + @classmethod - def resolve_path(cls, path:str, extension:Optional[str]= None, root:bool = False): + def makedirs(cls, *args, **kwargs): + return os.makedirs(*args, **kwargs) + + @classmethod + def resolve_path(cls, path:str = None, extension:Optional[str]= None, file_type:str = 'json'): ''' - Resolves path for saving items that relate to the module + ### Documentation for `resolve_path` class method + + #### Purpose: + The `resolve_path` method is a class method designed to process and resolve file and directory paths based on various inputs and conditions. This method is useful for preparing file paths for operations such as reading, writing, and manipulation. + + #### Parameters: + - `path` (str, optional): The initial path to be resolved. If not provided, a temporary directory path will be returned. + - `extension` (Optional[str], optional): The file extension to append to the path if necessary. Defaults to None. + - `root` (bool, optional): A flag to determine whether the path should be resolved in relation to the root directory. Defaults to False. + - `file_type` (str, optional): The default file type/extension to append if the `path` does not exist but appending the file type results in a valid path. Defaults to 'json'. - The path is determined by the module path + #### Behavior: + - If `path` is not provided, the method returns a path to a temporary directory. + - If `path` starts with '/', it is returned as is. + - If `path` starts with '~/', it is expanded to the user’s home directory. + - If `path` starts with './', it is resolved to an absolute path. + - If `path` does not fall under the above conditions, it is treated as a relative path. 
If `root` is True, it is resolved relative to the root temp directory; otherwise, relative to the class's temp directory. + - If `path` is a relative path and does not contain the temp directory, the method joins `path` with the appropriate temp directory. + - If `path` does not exist as a directory and an `extension` is provided, the extension is appended to `path`. + - If `path` does not exist but appending the `file_type` results in an existing path, the `file_type` is appended. + - The parent directory of `path` is created if it does not exist, avoiding any errors when the path is accessed later. + #### Returns: + - `str`: The resolved and potentially created path, ensuring it is ready for further file operations. + + #### Example Usage: + ```python + # Resolve a path in relation to the class's temporary directory + file_path = MyClassName.resolve_path('data/subfolder/file', extension='txt') + + # Resolve a path in relation to the root temporary directory + root_file_path = MyClassName.resolve_path('configs/settings' + ``` + + #### Notes: + - This method relies on the `os` module to perform path manipulations and checks. + - This method is versatile and can handle various input path formats, simplifying file path resolution in the class's context. ''' + if path == None: - path = cls.tmp_dir() + return cls.storage_dir() + + if path.startswith('/'): path = path elif path.startswith('~/'): path = os.path.expanduser(path) - elif path.startswith('./'): + elif path.startswith('.'): path = os.path.abspath(path) else: # if it is a relative path, then it is relative to the module path # ex: 'data' -> '.commune/path_module/data' - tmp_dir = c.tmp_dir() if root else cls.tmp_dir() + storage_dir = cls.storage_dir() - if tmp_dir not in path: - path = os.path.join(tmp_dir, path) + if storage_dir not in path: + path = os.path.join(storage_dir, path) if not os.path.isdir(path): if extension != None and extension != path.split('.')[-1]: path = path + '.' 
+ extension + + if not os.path.exists(path) and os.path.exists(path + f'.{file_type}'): + path = path + f'.{file_type}' + dirpath = os.path.dirname(path) + if not os.path.exists(dirpath): + os.makedirs(dirpath, exist_ok=True) + return path @classmethod @@ -1066,12 +1085,12 @@ def resolve_port(cls, port:int=None, **kwargs): Resolves the port and finds one that is available ''' if port == None or port == 0: - port = cls.free_port(port, **kwargs) + port = c.free_port(port, **kwargs) - if cls.port_used(port): - port = cls.free_port(port, **kwargs) + if c.port_used(port): + port = c.free_port(port, **kwargs) - return port + return int(port) @classmethod def has_free_ports(self, n:int = 1, **kwargs): @@ -1100,16 +1119,14 @@ def random_port(cls, *args, **kwargs): return cls.choice(cls.free_ports(*args, **kwargs)) @staticmethod - def random_int(*args): - import random - if len(args) == 1: - return random.randint(0, args[0]) - elif len(args) == 2: - return random.randint(args[0], args[1]) - else: - raise ValueError('Invalid number of arguments') - - + def random_int(start_value=100, end_value=None): + if end_value == None: + end_value = start_value + start_value, end_value = 0 , start_value + + assert start_value != None, 'start_value must be provided' + assert end_value != None, 'end_value must be provided' + return random.randint(start_value, end_value) @classmethod def ports(cls, ip='0.0.0.0') -> List[int]: @@ -1138,7 +1155,6 @@ def free_port(cls, port_range: List[int] = None , ip:str =None, avoid_ports = None, - reserve:bool = False, random_selection:bool = True) -> int: ''' @@ -1151,26 +1167,16 @@ def free_port(cls, port_range = cls.resolve_port_range(port_range) ports = list(range(*port_range)) - - ip = ip if ip else c.default_ip if random_selection: ports = c.shuffle(ports) - - reserved_ports = cls.reserved_ports() - # return only when the port is available - port = None for port in ports: - if port in reserved_ports: - continue if port in avoid_ports: continue if cls.port_available(port=port, ip=ip): - if reserve: - cls.reserve_port(port) return port @@ -1189,42 +1195,35 @@ def kwargs2attributes(self, kwargs:dict, ignore_error:bool = False): assert not hasattr(self, k) setattr(self, k) - @classmethod - def kill_port(cls, port:int, mode='bash')-> str: - - port2module = cls.port2module() - if port in port2module: - - cls.kill(port2module[port]) + def kill_port_range(self, start_port = None, end_port = None, timeout=5, n=0): + if start_port != None and end_port != None: + port_range = [start_port, end_port] + else: + port_range = c.port_range() - if mode == 'python': - import signal - from psutil import process_iter - ''' - Kills the port {port} on the localhost - ''' - for proc in process_iter(): - for conns in proc.connections(kind='inet'): - if conns.laddr.port == port: - proc.send_signal(signal.SIGKILL) # or SIGKILL - return port - elif mode == 'bash': - return c.run_command(f'kill -9 $(lsof -ti:{port})', bash=True, verbose=True) - - @classmethod - def restart_servers(cls, module:str=None, mode:str = 'server'): - ''' - Kill the server by the name - ''' - - fn = getattr(cls, f'{mode}_restart') - for module in c.servers(module,network='local'): + if n > 0: + port_range = [start_port, start_port + n] + assert isinstance(port_range[0], int), 'port_range must be a list of ints' + assert isinstance(port_range[1], int), 'port_range must be a list of ints' + assert port_range[0] < port_range[1], 'port_range must be a list of ints' + futures = [] + for port in range(*port_range): + 
c.print(f'Killing port {port}', color='red') try: - c.print(f'Restarting {module}', color='red') - fn(module) + self.kill_port(port) except Exception as e: c.print(f'Error: {e}', color='red') - continue + + + def check_used_ports(self, start_port = 8501, end_port = 8600, timeout=5): + port_range = [start_port, end_port] + used_ports = {} + for port in range(*port_range): + used_ports[port] = self.port_used(port) + return used_ports + @classmethod + def kill_port(cls, port:int, mode='bash')-> str: + return c.module('os').kill_port(port=port, mode=mode) @classmethod def pm2_restart_all(cls): @@ -1250,128 +1249,91 @@ def kill_all_servers( *args, **kwargs): @classmethod - def kill_all(cls,*args,**kwargs): - for module in c.servers(*args, **kwargs): - c.print(c.kill(module)) - - - - - - @classmethod - def restart_all_servers(cls, verbose: bool = True): - ''' - Kill all of the servers - ''' - for module in cls.servers(): - if verbose: - c.print(f'Restarting {module}', color='red') - cls.server_restart(module) - @classmethod - def restart_all(cls): - cls.restart_all_servers() - - @classmethod - def path_config_exists(cls, path:str) -> bool: - ''' - Checks if the path exists - ''' - for ext in ['.yaml', '.yml']: - if os.path.exists(path.replace('.py', ext)): - return True - return False - @classmethod - def path2simple(cls, path:str, compress:bool = True,) -> str: - - # does the config exist + def kill_all(cls, network='local', timeout=20, verbose=True): + futures = [] + namespace = c.namespace(network=network, update=True) + progress = c.tqdm(len(namespace)) - simple_path = path.split(deepcopy(cls.root_dir))[-1] - if cls.path_config_exists(path): - simple_path = os.path.dirname(simple_path) + for s in c.servers(network=network): + c.print(f'Killing {s}', color='red') + futures += [c.submit(c.kill, kwargs={'module':s, 'update': False}, return_future=True)] - simple_path = simple_path.replace('.py', '') + results_list = [] + for f in c.as_completed(futures, timeout=timeout): + result = f.result() + c.print(result, verbose=verbose) + results_list += [result] - - simple_path = simple_path.replace('/', '.')[1:] + namespace = c.namespace(network=network, update=True) + progress.update(1) - # compress nae - chunks = simple_path.split('.') - new_chunks = [] - for i, chunk in enumerate(chunks): - if len(new_chunks)>0: - if new_chunks[-1] == chunks[i]: - continue - elif any([chunks[i].endswith(s) for s in ['_module', 'module']]): - continue - new_chunks.append(chunk) - simple_path = '.'.join(new_chunks) - - # remove the modules prefix - if simple_path.startswith('modules.'): - simple_path = simple_path.replace('modules.', '') - # remove any files to compress the name even further for - if len(simple_path.split('.')) > 2: - - if simple_path.split('.')[-1].endswith(simple_path.split('.')[-2]): - simple_path = '.'.join(simple_path.split('.')[:-1]) - return simple_path - + return {'namespace': namespace} + @classmethod - def path2localpath(cls, path:str) -> str: - local_path = path.replace(cls.repo_path, cls.root_dir) - return local_path + def path2simple(cls, *args, **kwargs ) -> str: + return c.module('tree').path2simple(*args, **kwargs) @classmethod - def path2config(cls, path:str, to_munch=False)-> dict: - path = cls.path2configpath(path=path) - return cls.load_config(path, to_munch=to_munch) + def path2objectpath(cls, path:str = None, tree=None) -> str: + return c.module('tree').path2objectpath(path=path, tree=tree) @classmethod - def path2configpath(cls, path:str): - return path.replace('.py', '.yaml') - 
@classmethod - def simple2configpath(cls, path:str): - return cls.path2configpath(cls.simple2path(path)) - @classmethod - def simple2config(cls, path:str, to_munch=False)-> dict: - return cls.load_config(cls.simple2configpath(path), to_munch=to_munch) + def tree_paths(cls, *args, **kwargs) -> List[str]: + return c.module('tree').tree_paths(*args, **kwargs) @classmethod - def import_path(cls): - return cls.path2objectpath(cls.module_file()) + def tree_names(cls): + return c.module('tree').tree_names() + def file2classes(self, path:str = None, search:str = None, start_lines:int=2000): + return self.find_python_classes(path=path, search=search, start_lines=start_lines) + @classmethod - def object_path(cls): - return cls.path2objectpath(cls.module_path(simple=False)) + def find_classes(cls, path): + code = c.get_text(path) + classes = [] + for line in code.split('\n'): + if all([s in line for s in ['class ', '(', '):']]): + classes.append(line.split('class ')[-1].split('(')[0].strip()) + return [c for c in classes] + @classmethod - def object_module_path(cls): - return '.'.join(cls.object_path().split('.')[:-1]) - + def find_functions(cls, path): + code = c.get_text(path) + functions = [] + for line in code.split('\n'): + if line.startswith('def '): + if all([s in line for s in ['def ', '(', '):']]): + functions.append(line.split('def ')[-1].split('(')[0].strip()) + return functions @classmethod - def __object_name__(cls): - return '.'.join(cls.object_path().split('.')[:-1]) - - - @classmethod - def find_python_class(cls, path:str , class_index:int=0, search:str = None, start_lines:int=2000): + def find_python_classes(cls, path:str , class_index:int=0, search:str = None, start_lines:int=2000): import re - + path = cls.resolve_path(path) + if os.path.isdir(path): + file2classes = {} + for f in c.glob(path): + if f.endswith('.py'): + try: + file2classes[f] = cls.find_python_classes(f, class_index=class_index, search=search, start_lines=start_lines) + except Exception as e: + c.print(f'Error: {e}', color='red') + return file2classes # read the contents of the Python script file python_script = cls.readlines(path, end_line = start_lines, resolve=False) class_names = [] lines = python_script.split('\n') + + # c.print(python_script) for line in lines: - key_elements = ['class ', '(', '):'] - self_ref_condition = 'key_elements' not in line - has_class_bool = all([key_element in line for key_element in key_elements]) if has_class_bool: @@ -1387,206 +1349,268 @@ def find_python_class(cls, path:str , class_index:int=0, search:str = None, star # return the class names return class_names - - @classmethod - def path2objectpath(cls, path:str) -> str: - if path.endswith('module/module.py'): - return 'commune.Module' - - object_name = cls.find_python_class(path) - if len(object_name) == 0: - return None - object_name = object_name[-1] - path = path.replace(cls.repo_path+'/', '').replace('.py','.').replace('/', '.') - path = path + object_name - return path + def url2text(cls, *args, **kwargs): + return c.module('web').url2text(*args, **kwargs).text + module_cache = {} @classmethod - def path2object(cls, path:str) -> str: - path = cls.path2objectpath(path) - return c.import_object(path) + def get_module(cls, + path:str = 'module', + cache=True, + trials = 3, + tree = 'commune', + verbose = 0, + ) -> str: + """ + params: + path: the path to the module + cache: whether to cache the module + tree: the tree to search for the module + """ + path = path or 'module' + module = None + cache_key = f'{tree}_{path}' + t0 
= c.time() + if cache and cache_key in c.module_cache: + module = c.module_cache[cache_key] + if module != None: + return module - @classmethod - def get_module(cls, path:str) -> str: - path = cls.simple2path(path) - path = cls.path2objectpath(path) - return c.import_object(path) + try: + module = c.simple2object(path) + except Exception as e: + raise e + if trials == 0: + raise Exception(f'Could not find {path} in {c.modules(path)} modules') + c.print(f'Could not find {path} in {c.modules(path)} modules, so we are updating the tree', color='red') + module = c.get_module(path, cache=cache , verbose=verbose, trials=trials-1) + if cache: + c.module_cache[cache_key] = module + + if verbose: + c.print(f'Loaded {path} in {c.time() - t0} seconds', color='green') + return module @classmethod - def module_tree(cls, search=None, - mode='path', - cache:bool = True, - update:bool = False, - verbose:bool = False) -> List[str]: - - if update and verbose: - c.print('Building module tree', verbose=verbose) - assert mode in ['path', 'object'] - module_tree = {} - if mode == 'path': - module_tree = {cls.path2simple(f):f for f in cls.get_module_python_paths()} - - elif mode == 'object': - module_tree = {cls.path2simple(f):cls.path2objectpath(f) for f in cls.get_module_python_paths()} - module_tree = {k:v for k,v in module_tree.items() if search is None or search in k} - - # to use functions like c. we need to replace it with module lol - if cls.root_module_class in module_tree: - module_tree[cls.module_path()] = module_tree.pop(cls.root_module_class) - if cache or update: - c.put('module_tree', module_tree) - return module_tree - - available_modules = tree = module_tree - @classmethod - def list_modules(cls, search=None): - modules = list(cls.module_tree(search).keys()) - return modules + def is_dir_module(cls, path:str) -> bool: + """ + determine if the path is a module + """ + filepath = cls.simple2path(path) + if path.replace('.', '/') + '/' in filepath: + return True + if ('modules/' + path.replace('.', '/')) in filepath: + return True + return False + @classmethod + def timefn(cls, fn, *args, trials=1, **kwargs): + if trials > 1: + responses = [] + for i in range(trials): + responses += [cls.timefn(fn, *args, trials=1, **kwargs)] + return responses + if isinstance(fn, str): + if '/' in fn: + module, fn = fn.split('/') + module = c.module(module) + else: + module = cls + if module.classify_fn(fn) == 'self': + module = cls() + fn = getattr(module, fn) + + t1 = c.time() + result = fn(*args, **kwargs) + t2 = c.time() + + return {'time': t2 - t1} - @classmethod - def get_tags(cls, module, *args, **kwargs): - servers = c.servers(module, *args, **kwargs) - return [s.split('::')[-1] if len(s.split('::'))>1 else None for s in servers] + def search_dict(self, d:dict = 'k,d', search:str = {'k.d': 1}) -> dict: + search = search.split(',') + new_d = {} + + for k,v in d.items(): + if search in k.lower(): + new_d[k] = v + + return new_d + + @classmethod - def has_config(cls) -> bool: - config_path = cls.configpath() - return c.exists(config_path) + def tree(cls, *args, **kwargs) -> List[str]: + return c.module('tree').tree(*args, **kwargs) - - - @classmethod - def has_module(cls, module): - return module in c.modules() - + def tree2path(cls, *args, **kwargs) -> List[str]: + return c.module('tree').tree2path( *args, **kwargs) + @classmethod - def valid_module(cls,module,**kwargs ): - modules = c.servers(module, **kwargs) - return bool(len(modules) > 0) + def trees(cls): + return c.m('tree').trees() @classmethod - def 
tasks(cls, task = None, mode='pm2',**kwargs) -> List[str]: - kwargs['network'] = 'local' - kwargs['update'] = False - modules = c.servers( **kwargs) - tasks = getattr(cls, f'{mode}_list')(task) - tasks = list(filter(lambda x: x not in modules, tasks)) - return tasks + def add_tree(cls, *args, **kwargs): + return c.m('tree').add_tree(*args, **kwargs) @classmethod - def models(cls, *args, **kwargs) -> List[str]: - models = c.servers(*args, **kwargs) - models = [k for k in models if k.startswith('model')] - return models - @classmethod - def datasets(cls, **kwargs) -> List[str]: - return c.servers('data', **kwargs) - datas = datasets - - @staticmethod - def module_config_tree() -> List[str]: - return [f.replace('.py', '.yaml')for f in c.get_module_python_paths()] + def rm_tree(cls, *args, **kwargs): + return c.m('tree').rm_tree(*args, **kwargs) - @staticmethod - def is_imported(package:str) : - return bool(package in sys.modules) + def repo2module(self, repo_path:str, name=None, template_module='demo', **kwargs): + if not repo_path.startswith('/') and not repo_path.startswith('.') and not repo_path.startswith('~'): + repo_path = os.path.abspath('~/' + repo_path) + assert os.path.isdir(repo_path), f'{repo_path} is not a directory, please clone it' + c.add_tree(repo_path) + template_module = c.module(template_module) + code = template_module.code() - @classmethod - def simple2path(cls, path) -> Dict[str, str]: - module_tree = c.module_tree() - return module_tree[path] + # replace the template module class name with the new module name + name = name or repo_path.split('/')[-1] + assert not c.module_exists(name), f'{name} already exists' + code_lines = code.split('\n') + for i, line in enumerate(code_lines): + if 'class' in line and 'c.Module' in line: + class_name = line.split('class ')[-1].split('(')[0] + code_lines[i] = line.replace(class_name, name) + break + code = '\n'.join(code_lines) + + module_path = repo_path + '/module.py' + + # write the module code + c.put_text(code, module_path) + # build the tree + c.build_tree(update=True) - module_python_paths = None + + @classmethod + def simple2path(cls, path:str, **kwargs) -> str: + return c.module('tree').simple2path(path, **kwargs) + + @classmethod + def simple2objectpath(cls, path:str, path2objectpath = {'tree': 'commune.tree.tree.Tree'}, **kwargs) -> str: + if path in path2objectpath: + object_path = path2objectpath[path] + else: + object_path = c.module('tree').simple2objectpath(path, **kwargs) + return object_path + @classmethod + def simple2object(cls, path:str, **kwargs) -> str: + path = c.simple2objectpath(path, **kwargs) + try: + return c.import_object(path) + except Exception as e: + c.print(path) + raise e + + + + @classmethod + def python_paths(cls, path:str = None, recursive=True, **kwargs) -> List[str]: + if path == None: + path = c.homepath + return glob(path + '/**/*.py', recursive=recursive, **kwargs) + @classmethod - def get_module_python_paths(cls) -> List[str]: + def get_module_python_paths(cls, + path : str= None, + search:str=None, + end_line=200, + ) -> List[str]: ''' Search for all of the modules with yaml files.
Format of the file ''' - if isinstance(cls.module_python_paths, list): - return cls.module_python_paths + + path = path or c.libpath + + search_glob = path +'/**/*.py' modules = [] - failed_modules = [] # find all of the python files - for f in glob(c.root_path + '/**/*.py', recursive=True): + for f in glob(search_glob, recursive=True): + + initial_text = c.readlines(f, end_line=end_line) + + commune_in_file = 'import commune as c' in initial_text + is_commune_root = 'class c:' in initial_text + if not commune_in_file and not is_commune_root: + continue if os.path.isdir(f): continue - file_path, file_ext = os.path.splitext(f) - - if file_ext == '.py': - dir_path, file_name = os.path.split(file_path) - dir_name = os.path.basename(dir_path) - previous_dir_path = dir_path.split('/')[-2] - - if dir_name.lower() == file_name.lower(): - # if the dirname is equal to the filename then it is a module - modules.append(f) - if file_name.lower().endswith(dir_name.lower()): - # if the dirname is equal to the filename then it is a module - modules.append(f) - if file_name.lower().endswith('module'): - # if the dirname is equal to the filename then it is a module - modules.append(f) - - elif 'module' in file_name.lower(): - modules.append(f) - elif any([os.path.exists(file_path+'.'+ext) for ext in ['yaml', 'yml']]): - modules.append(f) - else: - # FIX ME - f_classes = cls.find_python_class(f, search=['commune.Module', 'c.Module']) - # f_classes = [] - if len(f_classes) > 0: - modules.append(f) - - cls.module_python_paths = modules - + modules.append(f) + # we ar caching t return modules - tree_folders_path = 'module_tree_folders' + available_modules = module_tree = tree @classmethod - def add_tree(cls, tree_path:str, **kwargs): - path = cls.tree_folders_path - tree_folder = c.get(path, []) - tree_folder += [tree_path] - assert os.path.isdir(tree_path) - assert isinstance(tree_folder, list) - c.put(path, tree_folder, **kwargs) - return {'module_tree_folders': tree_folder} + def list_modules(cls, search=None): + modules = list(cls.module_tree(search).keys()) + return modules + + + @classmethod - def ls_trees(cls): - path = tree_folders_path - tree_folders = c.get(path, []) - return tree_folders + def get_tags(cls, module, *args, **kwargs): + servers = c.servers(module, *args, **kwargs) + return [s.split('::')[-1] if len(s.split('::'))>1 else None for s in servers] + + @classmethod + def has_config(cls) -> bool: + return c.exists(cls.config_path()) + @classmethod - def rm_tree(cls, tree_path:str, **kwargs): - path = cls.tree_folders_path - tree_folder = c.get(tree_path, []) - tree_folder = [f for f in tree_folder if f != tree_path ] - c.put(path, tree_folder) - return {'module_tree_folders': tree_folder} + def has_module(cls, module): + return module in c.modules() + + @classmethod + def Vali(cls, *args, **kwargs): + return c.module('vali') + @classmethod + def tasks(cls, task = None, mode='pm2',**kwargs) -> List[str]: + kwargs['network'] = 'local' + kwargs['update'] = False + modules = c.servers( **kwargs) + tasks = getattr(cls, f'{mode}_list')(task) + tasks = list(filter(lambda x: x not in modules, tasks)) + return tasks + + @classmethod + def models(cls, *args, **kwargs) -> List[str]: + models = c.servers(*args, **kwargs) + models = [k for k in models if k.startswith('model')] + return models + + @classmethod + def infer_device_map(cls, *args, **kwargs): + return cls.infer_device_map(*args, **kwargs) + + @classmethod + def datasets(cls, **kwargs) -> List[str]: + return c.servers('data', **kwargs) + datas 
= datasets + @staticmethod + def is_imported(package:str) : + return bool(package in sys.modules) - - - @classmethod - def dashboard(cls, *args, **kwargs): - return c.st('subspace') - dash = dashboard @classmethod def is_parent(cls, parent=None): @@ -1605,60 +1629,88 @@ def python(cls, *cmd, interpreter:str='python3'): def timer(cls, *args, **kwargs): from commune.utils.time import Timer return Timer(*args, **kwargs) + - @staticmethod - def timeit(fn): - from commune.utils.time import Timer - def wrapper(*args, **kwargs): - t = c.time() + def timefn(cls, fn, *args, **kwargs): + def wrapper(*args, **kwargs): + t1 = c.time() result = fn(*args, **kwargs) - c.print(f'Finished {fn.__name__} in {c.time() - t:.2f} seconds') - # return result - + t2 = c.time() + c.print(f'{fn.__name__} took {t2-t1} seconds') + return result return wrapper - + + @classmethod + def timeit(cls, fn, *args, include_result=False, **kwargs): + + t = c.time() + if isinstance(fn, str): + fn = cls.get_fn(fn) + result = fn(*args, **kwargs) + response = { + 'latency': c.time() - t, + 'fn': fn.__name__, + + } + if include_result: + c.print(response) + return result + return response + @staticmethod - def remotewrap(fn): - ''' - WARNNG IN PROGRSS, USE WITH CAUTION + def remotewrap(fn, remote_key:str = 'remote'): ''' - - def wrapper(self, *args, **kwargs): + calls your function if you wrap it as such + + @c.remotewrap + def fn(): + pass - c.remote_fn(module=self, fn=fn.__name__, args=args, kwargs=kwargs) - result = fn(self, *args, **kwargs) - c.print(f'Finished {fn.__name__} in {c.time() - t:.2f} seconds') - # return result - - return wrapper + # deploy it as a remote function + fn(remote=True) + ''' + def remotewrap(self, *args, **kwargs): + remote = kwargs.pop(remote_key, False) + if remote: + return c.remote_fn(module=self, fn=fn.__name__, args=args, kwargs=kwargs) + else: + return fn(self, *args, **kwargs) + + return remotewrap + # def local @classmethod - def locals2kwargs(cls, - locals_dict:dict, - seperate_args:bool=False, - merge_kwargs :bool = True) -> dict: - kwargs = {} - locals_dict = locals_dict if locals_dict != None else {} - assert isinstance(locals_dict, dict) - kwargs.update(locals_dict) - if merge_kwargs: - kwargs.update(locals_dict.get('kwargs', {})) - + def local_node_urls(cls): + return c.module('subpsace').local_node_urls() + + def locals2hash(self, kwargs:dict = {'a': 1}, keys=['kwargs']) -> str: kwargs.pop('cls', None) kwargs.pop('self', None) + return c.dict2hash(kwargs) - if seperate_args: - args = locals_dict.pop('args', []) - assert isinstance(args, list), f'args must be a list, got {type(args)}' - return args, kwargs + @classmethod + def dict2hash(cls, d:dict) -> str: + for k in d.keys(): + assert c.jsonable(d[k]), f'{k} is not jsonable' + return c.hash(d) + + + @classmethod + def locals2kwargs(cls,locals_dict:dict, kwargs_keys=['kwargs']) -> dict: + kwargs = locals_dict or {} + kwargs.pop('cls', None) + kwargs.pop('self', None) assert isinstance(kwargs, dict), f'kwargs must be a dict, got {type(kwargs)}' + # These lines are needed to remove the self and cls from the locals_dict + for k in kwargs_keys: + kwargs.update( locals_dict.pop(k, {}) or {}) + return kwargs - get_kwargs = get_params = locals2kwargs @classmethod @@ -1670,151 +1722,145 @@ def get_parents(cls, obj=None): return list(obj.__mro__[1:-1]) @classmethod - def module_config_tree(cls): - return {m: c.simple2config(m) for m in c.modules()} + def storage_dir(cls): + return f'{c.cache_path()}/{cls.module_path()}' + tmp_dir = cache_dir = 
storage_dir - @classmethod - def tmp_dir(cls): - return f'{c.cache_path()}/{cls.module_path()}' + def refresh_storage(cls): + c.rm(cls.storage_dir()) + + @classmethod + def refresh_storage_dir(cls): + c.rm(cls.storage_dir()) + c.makedirs(cls.storage_dir()) + ############ JSON LAND ############### @classmethod def cache_path(cls): - return os.path.expanduser(f'~/.{cls.library_name}') + path = os.path.expanduser(f'~/.{cls.libname}') + return path @classmethod def tilde_path(cls): return os.path.expanduser('~') - - @classmethod - def get_json(cls, *args, **kwargs): - loop = cls.get_event_loop() - return loop.run_until_complete(cls.async_get_json(*args, **kwargs)) @classmethod - async def async_get_json(cls, - path:str, - default:Any=None, - root: bool = False, - verbose: bool = False, - **kwargs): - + def get_json(cls, + path:str, + default:Any=None, + verbose: bool = False,**kwargs): from commune.utils.dict import async_get_json - path = cls.resolve_path(path=path, extension='json', root=root) + path = cls.resolve_path(path=path, extension='json') + + c.print(f'Loading json from {path}', color='green', verbose=verbose) + try: - data = await async_get_json(path, default=default, **kwargs) + data = cls.get_text(path, **kwargs) except Exception as e: - if verbose: - c.print(f'Failed to load json from {path} with error {e}') return default + if isinstance(data, str): + try: + data = json.loads(data) + except Exception as e: + c.print(e) + return default if isinstance(data, dict): if 'data' in data and 'meta' in data: data = data['data'] - return data + @classmethod + async def async_get_json(cls,*args, **kwargs): + return cls.get_json(*args, **kwargs) load_json = get_json data_path = repo_path + '/data' @classmethod - def put_torch(cls, path:str, data:Dict, root:bool = False, **kwargs): + def put_torch(cls, path:str, data:Dict, **kwargs): import torch - path = cls.resolve_path(path=path, extension='pt', root=root) + path = cls.resolve_path(path=path, extension='pt') torch.save(data, path) return path - - @classmethod - def get_torch(cls,path:str, root:bool = False, **kwargs): - import torch - path = cls.resolve_path(path=path, extension='pt', root=root) - return torch.load(path) - - def init_nn(self): import torch torch.nn.Module.__init__(self) @classmethod - def put_json(cls,*args,**kwargs) -> str: - loop = cls.get_event_loop() - return loop.run_until_complete(cls.async_put_json(*args, **kwargs)) + async def async_put_json(cls,*args,**kwargs) -> str: + return cls.put_json(*args, **kwargs) @classmethod - async def async_put_json(cls, + def put_json(cls, path:str, data:Dict, meta = None, - root: bool = False, - cache: bool = False, + verbose: bool = False, + **kwargs) -> str: - - from commune.utils.dict import async_put_json if meta != None: data = {'data':data, 'meta':meta} - path = cls.resolve_path(path=path, extension='json', root=root) + path = cls.resolve_path(path=path, extension='json') # cls.lock_file(path) - - await async_put_json(path=path, data=data, **kwargs) - # cls.unlock_file(path) + c.print(f'Putting json from {path}', color='green', verbose=verbose) + if isinstance(data, dict): + data = json.dumps(data) + c.put_text(path, data) return path save_json = put_json @classmethod - def file_exists(cls, path:str, root:bool = False)-> bool: - path = cls.resolve_path(path=path, root=root) - + def file_exists(cls, path:str)-> bool: + path = cls.resolve_path(path=path) exists = os.path.exists(path) if not exists and not path.endswith('.json'): exists = os.path.exists(path + '.json') - return 
exists - + exists = exists_json = file_exists @classmethod - def docs(cls): + def readme(cls): # Markdown input markdown_text = "## Hello, *Markdown*!" - - path = cls.filepath().replace('.py', '_docs.md') markdown_text = cls.get_text(path=path) return markdown_text + + docs = readme - exists = exists_json = file_exists - @classmethod - def rm_json(cls, path=None, root:bool = False): + def rm_json(cls, path=None): from commune.utils.dict import rm_json if path in ['all', '**']: return [cls.rm_json(f) for f in cls.glob(files_only=False)] - path = cls.resolve_path(path=path, extension='json', root=root) + path = cls.resolve_path(path=path, extension='json') return rm_json(path ) @classmethod - def rmdir(cls, path, root:bool = False): + def rmdir(cls, path): import shutil return shutil.rmtree(path) @classmethod - def isdir(cls, path, root:bool = False): - path = cls.resolve_path(path=path, root=root) + def isdir(cls, path): + path = cls.resolve_path(path=path) return os.path.isdir(path) @classmethod - def isfile(cls, path, root: bool = False): - path = cls.resolve_path(path=path, root=root) + def isfile(cls, path): + path = cls.resolve_path(path=path) return os.path.isfile(path) @@ -1822,40 +1868,57 @@ def rm_many(cls, paths:List[str]): paths = c.ls(paths) - - # for path in paths: # cls.rm(path) @classmethod - def rm(cls, path, extension=None, root=False, mode = 'json'): - path = cls.resolve_path(path=path, extension=extension, root=root) + def rm_all(cls): + for path in cls.ls(): + cls.rm(path) + return {'success':True, 'message':f'{cls.storage_dir()} removed'} + - if not os.path.exists(path): - path += f'.{mode}' + @classmethod + def rm(cls, path, extension=None, mode = 'json'): + assert isinstance(path, str), f'path must be a string, got {type(path)}' + path = cls.resolve_path(path=path, extension=extension) - if os.path.exists(path): - if os.path.isdir(path): - cls.rmdir(path) - else: - os.remove(path) - assert not os.path.exists(path) - return {'success':True, 'message':f'{path} removed'} - else: + # incase we want to remove the json file + mode_suffix = f'.{mode}' + if not os.path.exists(path) and os.path.exists(path+mode_suffix): + path += mode_suffix + + if not os.path.exists(path): return {'success':False, 'message':f'{path} does not exist'} + if os.path.isdir(path): + c.rmdir(path) + else: + os.remove(path) + assert not os.path.exists(path), f'{path} was not removed' + return {'success':True, 'message':f'{path} removed'} @classmethod - def glob(cls, path =None, files_only:bool = True, root:bool = False, recursive:bool=True): - - path = cls.resolve_path(path, extension=None, root=root) - + def rm_all(cls): + storage_dir = cls.storage_dir() + if c.exists(storage_dir): + cls.rm(storage_dir) + assert not c.exists(storage_dir), f'{storage_dir} was not removed' + c.makedirs(storage_dir) + assert c.is_dir_empty(storage_dir), f'{storage_dir} was not removed' + return {'success':True, 'message':f'{storage_dir} removed'} + + def is_dir_empty(self, path:str): + return len(self.ls(path)) == 0 + + + @classmethod + def glob(cls, path =None, files_only:bool = True, recursive:bool=True): + path = cls.resolve_path(path, extension=None) if os.path.isdir(path): path = os.path.join(path, '**') - paths = glob(path, recursive=recursive) - if files_only: paths = list(filter(lambda f:os.path.isfile(f), paths)) return paths @@ -1864,36 +1927,44 @@ def glob(cls, path =None, files_only:bool = True, root:bool = False, recursive: def get_file_size(cls, path:str): path = cls.resolve_path(path) return 
os.path.getsize(path) - - @classmethod - def ls_json(cls, path:str = '', recursive:bool = True): - return [os.path.basename(p).replace('.json', '')for p in cls.ls(path, recursive=recursive)] - @classmethod def ls(cls, path:str = '', recursive:bool = False, - root:bool = False, + search = None, return_full_path:bool = True): - path = cls.resolve_path(path, extension=None, root=root) + """ + provides a list of files in the path + + this path is relative to the module path if you don't specify ./ or ~/ or /, + which means it's based on the module path + """ + path = cls.resolve_path(path, extension=None) try: ls_files = cls.lsdir(path) if not recursive else cls.walk(path) except FileNotFoundError: return [] if return_full_path: - ls_files = [os.path.expanduser(os.path.join(path,f)) for f in ls_files] + ls_files = [os.path.abspath(os.path.join(path,f)) for f in ls_files] + + ls_files = sorted(ls_files) + if search != None: + ls_files = list(filter(lambda x: search in x, ls_files)) return ls_files @classmethod def lsdir(cls, path:str) -> List[str]: - if path.startswith('~'): - path = os.path.expanduser(path) + path = os.path.abspath(path) return os.listdir(path) + @classmethod + def abspath(cls, path:str) -> str: + return os.path.abspath(path) + + @classmethod def walk(cls, path:str, module:str=False) -> List[str]: - import os path_map = {} for root, dirs, files in os.walk(path): for f in files: @@ -1902,10 +1973,6 @@ def walk(cls, path:str, module:str=False) -> List[str]: return list(path_map.keys()) - ftree = walk - @classmethod - def bt(cls, *args, **kwargs): - return cls.get_module('bittensor')(*args, **kwargs) @classmethod def __str__(cls): return cls.__name__ @@ -1917,67 +1984,19 @@ def get_server_info(cls,name:str) -> Dict: @classmethod def connect(cls, module:str, - network : str = None, - namespace = None, - mode = server_mode, + network : str = 'local', + mode = 'http', virtual:bool = True, - verbose: bool = False, - prefix_match: bool = False, - key = None, - return_future:bool = False,): - - kwargs = c.locals2kwargs(locals()) - return_future = kwargs.pop('return_future', False) - future = cls.async_connect(**kwargs) - - if return_future: - return future - return c.gather(future) + **kwargs): + + return c.module( f'client').connect(module=module, + virtual=virtual, + network=network, + **kwargs) @classmethod - async def async_connect(cls, - module:str, - network : str = None, - namespace = None, - mode = server_mode, - virtual:bool = False, - verbose: bool = True, - prefix_match: bool = False, - key = None, - **kwargs ): - - """ - Connects to a server by the name of the module - :param module: name of the module - """ - - network = c.resolve_network(network) - key = cls.get_key(key) - if c.is_address(module): - address = module - else: - namespace = namespace if namespace != None else c.namespace(module, network=network) - modules = list(namespace.keys()) - if prefix_match == True: - module = c.choice(modules) - else: - modules = [m for m in modules if m==module] - - assert len(modules) > 0, f'No modules with {module} found in namespace {namespace.keys()}' - address = namespace[module] - - ip, port = address.split(':') - - # CONNECT TO THE MODULE - if 'None' in address: - raise Exception(f'Invalid address {address}') - - if ip == c.ip(): - ip = '0.0.0.0' - - client= c.get_client(ip=ip, port=int(port), key=key, mode=mode, virtual=virtual, **kwargs) - - return client + async def async_connect(cls, *args, **kwargs): + return c.connect(*args, **kwargs) @classmethod def root_address(cls,
name:str='module', @@ -1986,17 +2005,28 @@ def root_address(cls, name:str='module', sleep_interval:int = 1, **kwargs): + + + """ Root module """ - if not c.server_exists(name, network=network): - c.serve(name, network=network, wait_for_server=True, **kwargs) - address = c.call('module', 'address', network=network, timeout=timeout) - ip = c.ip() - address = ip+':'+address.split(':')[-1] + try: + if not c.server_exists(name, network=network): + c.serve(name, network=network, wait_for_server=True, **kwargs) + address = c.call('module/address', network=network, timeout=timeout) + ip = c.ip() + address = ip+':'+address.split(':')[-1] + except Exception as e: + c.print(f'Error: {e}', color='red') + address = None return address addy = root_address + @property + def key_address(self): + return self.key.ss58_address + @staticmethod def round(x:Union[float, int], sig: int=6, small_value: float=1.0e-9): import math @@ -2037,30 +2067,11 @@ def connect_pool(cls, modules=None, *args, return_dict:bool=False, **kwargs): return dict(zip(modules, module_clients)) return module_clients - @classmethod - def get_client(cls, ip:str = None, port:int = None ,virtual:bool = True, mode=server_mode, **kwargs): - ''' - Returns a client to a server - ''' - client = c.module(f'server.{mode}.client')(ip=ip, port=port,**kwargs) - # if virtual turn client into a virtual client, making it act like if the server was local - if virtual: - client = c.virtual_client(client) - - return client - - - - nest_asyncio_enabled : bool = False @classmethod def nest_asyncio(cls): - assert not cls.nest_asyncio_enabled, 'Nest Asyncio already enabled' import nest_asyncio nest_asyncio.apply() - nest_asyncio_enabled = True - - @classmethod def port2module(cls, *args, **kwargs): @@ -2150,13 +2161,15 @@ def unlock_file(cls, f): @classmethod def is_address(cls, address:str) -> bool: - conds = [] - if '::' in address: + if not isinstance(address, str): return False + if '://' in address: + return True + conds = [] + conds.append(len(address.split('.')) >= 3) conds.append(isinstance(address, str)) conds.append(':' in address) - conds.append(cls.is_number(address.split(':')[-1])) - + conds.append(cls.is_int(address.split(':')[-1])) return all(conds) @classmethod @@ -2170,7 +2183,7 @@ def is_module(cls, obj=None) -> bool: return False @classmethod - def is_root_module(cls, obj=None) -> bool: + def is_root(cls, obj=None) -> bool: if obj is None: obj = cls @@ -2180,17 +2193,15 @@ def is_root_module(cls, obj=None) -> bool: return True return False - is_root = is_module_root = is_root_module + is_module_root = is_root_module = is_root @classmethod def new_event_loop(cls, nest_asyncio:bool = True) -> 'asyncio.AbstractEventLoop': import asyncio - if nest_asyncio: - cls.nest_asyncio() - loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) - - + if nest_asyncio: + cls.nest_asyncio() + return loop @@ -2210,20 +2221,16 @@ def set_event_loop(self, loop=None, new_loop:bool = False) -> 'asyncio.AbstractE @classmethod def get_event_loop(cls, nest_asyncio:bool = True) -> 'asyncio.AbstractEventLoop': - import asyncio - if nest_asyncio: - cls.nest_asyncio() try: loop = asyncio.get_event_loop() - except RuntimeError: - loop = cls.new_event_loop() - + except Exception as e: + loop = c.new_event_loop(nest_asyncio=nest_asyncio) return loop @classmethod - def get_port(cls, port:int = None, **kwargs)->int: - port = port if port is not None and port != 0 else cls.free_port(**kwargs) + def get_port(cls, port:int = None)->int: + port = port if port is not None and 
port != 0 else cls.free_port() while cls.port_used(port): port += 1 return port @@ -2250,6 +2257,8 @@ def server_name(self): @server_name.setter def server_name(self, v): + if callable(self.config): + self.set_config() self.config['server_name'] = v return self.config['server_name'] @@ -2264,23 +2273,15 @@ def wait_for_server(cls, time_waiting = 0 logs = [] while not c.server_exists(name, network=network): - c.sleep(sleep_interval) time_waiting += sleep_interval - new_logs = list(set(c.logs(name, mode='local').split('\n'))) - print_logs = [l for l in new_logs if l not in logs] - - if verbose: - if len(print_logs) > 0: - logs.extend(print_logs) - logs = list(set(logs)) - c.print('\n'.join(print_logs)) - if time_waiting > timeout: - raise TimeoutError(f'Timeout waiting for server to start') + c.sleep(sleep_interval) + logs.append(f'Waiting for {name} to start') return True def attributes(self): return list(self.__dict__.keys()) + @classmethod def get_attributes(cls, search = None, obj=None): if obj is None: @@ -2290,14 +2291,8 @@ def get_attributes(cls, search = None, obj=None): # assert hasattr(obj, '__dict__'), f'{obj} has no __dict__' attrs = dir(obj) if search is not None: - attrs = [a for a in attrs if search in a] + attrs = [a for a in attrs if search in a and callable(a)] return attrs - - - @classmethod - def virtual_client(cls, module): - virtual_client = c.import_object('commune.modules.client.virtual.VirtualClient') - return virtual_client(module) # NAMESPACE::MODULE namespace_module = 'module.namespace' @@ -2307,6 +2302,12 @@ def name2address(cls, name:str, network:str='local') -> str: @classmethod def servers(cls, *args, **kwargs) -> List[str]: return c.module("namespace").servers(*args, **kwargs) + @classmethod + def server2key(self, *args, **kwargs): + servers = c.servers() + key2address = c.key2address() + server2key = {s:key2address[s] for s in servers} + return server2key @classmethod def rservers(cls, *args, **kwargs) -> List[str]: @@ -2324,13 +2325,18 @@ def get_port(cls, module, **kwargs): return None return int(address.split(':')[-1]) @classmethod - def servers_info(cls, *args, **kwargs) -> List[str]: - return c.module("namespace").servers_info(*args, **kwargs) + def infos(cls, *args, **kwargs) -> List[str]: + return c.module("namespace").infos(*args, **kwargs) + @classmethod + def server2info(cls, *args, **kwargs) -> List[str]: + return c.module("namespace").server2info(*args, **kwargs) + @classmethod def has_server(cls, *args, **kwargs): return c.module("namespace").has_server(*args, **kwargs) @classmethod def server_exists(cls, name:str, network:str = 'local', prefix_match:bool=False, **kwargs) -> bool: + return c.module("namespace").server_exists(name=name, network=network, prefix_match=prefix_match, **kwargs) @classmethod def register_server(cls, name: str, address:str, network='local')-> dict: @@ -2345,6 +2351,10 @@ def add_server(cls, *args, **kwargs): @classmethod def add_servers(cls, *args, **kwargs): return c.module("namespace").add_servers(*args, **kwargs) + + @classmethod + def readd_servers(cls, *args, **kwargs): + return c.module("namespace").readd_servers(*args, **kwargs) @classmethod def rm_server(cls, *args, **kwargs): return c.module("namespace").rm_server(*args, **kwargs) @@ -2357,16 +2367,59 @@ def remote_servers(cls, *args, **kwargs): def namespace(cls, search:str = None, network:str='local', - update: bool = False): - return c.module("namespace").namespace(search=search, network=network, update=update) + update: bool = False, + **kwargs): + 
namespace = c.module("namespace").namespace(search=search, network=network, update=update, **kwargs) + return namespace + + + get_namespace = namespace @classmethod def rm_namespace(cls, *args, **kwargs): + """ + remove the namespace + """ return c.module("namespace").rm_namespace(*args, **kwargs) + + + + @classmethod + def empty_namespace(cls, *args, **kwargs): + """ + empty the namespace + """ + return c.module("namespace").empty_namespace(*args, **kwargs) + + @classmethod + def add_namespace(cls, *args, **kwargs): + return c.module("namespace").empty_namespace(*args, **kwargs) + + @classmethod def update_namespace(cls, network:str='local',**kwargs): return c.module("namespace").update_namespace(network=network, **kwargs) + @classmethod + def update_subnet(cls, *args, **kwargs): + return c.module("subspace")().update_subnet(*args, **kwargs) + + @classmethod + def subnet_params(cls, *args, **kwargs): + return c.module("subspace")().subnet_params(*args, **kwargs) + + @classmethod + def my_subnets(cls, *args, **kwargs): + return c.module("subspace")().my_subnets(*args, **kwargs) + + @classmethod + def global_params(cls, *args, **kwargs): + return c.module("subspace")().global_params(*args, **kwargs) + + @classmethod + def subnet_names(cls, *args, **kwargs): + return c.module("subspace")().subnet_names(*args, **kwargs) + @classmethod def put_namespace(cls,network:str, namespace:dict, **kwargs): namespace = c.module("namespace").put_namespace(network=network, namespace=namespace, **kwargs) @@ -2378,15 +2431,38 @@ def rm_namespace(cls,network:str, **kwargs): return namespace + def add_fn(self, fn, name=None): + if name == None: + name = fn.__name__ + assert not hasattr(self, name), f'{name} already exists' + + setattr(self, name, fn) + + return { + 'success':True , + 'message':f'Added {name} to {self.__class__.__name__}' + } + + add_attribute = add_attr = add_function = add_fn @classmethod - def resolve_server_name(cls, module:str = None, tag:str=None, name:str = None, tag_seperator:str='::', **kwargs): - + def resolve_server_name(cls, + module:str = None, + tag:str=None, + name:str = None, + tag_seperator:str='::', + **kwargs): + """ + Resolves the server name + """ # if name is not specified, use the module as the name such that module::tag if name == None: - # module::tag module = cls.module_path() if module == None else module + + # module::tag + if tag_seperator in module: + module, tag = module.split(tag_seperator) if tag_seperator in module: module, tag = module.split(tag_seperator) name = module @@ -2394,130 +2470,108 @@ def resolve_server_name(cls, module:str = None, tag:str=None, name:str = None, tag = None if tag != None: name = f'{name}{tag_seperator}{tag}' + + # ensure that the name is a string assert isinstance(name, str), f'Invalid name {name}' + return name resolve_name = resolve_server_name - @property - def whitelist(self): - if hasattr(self, '_whitelist'): - return self._whitelist - whitelist = c.helper_whitelist - is_module = c.is_root_module(self) - # we want to expose the helper functions - if not is_module: - whitelist += self.functions() + self.attributes() - return whitelist - - @whitelist.setter - def whitelist(self, whitelist:List[str]): - self._whitelist = whitelist + self.helper_functions - return whitelist - bl = blacklist = [] - @classmethod - def save_serve_kwargs(cls,server_name:str, kwargs:dict): - serve_kwargs = c.get('serve_kwargs', {}) - serve_kwargs[server_name] = kwargs - c.put('serve_kwargs', serve_kwargs) - return serve_kwargs - - @classmethod - def 
load_serve_kwargs(cls, server_name:str): - serve_kwargs = c.get('serve_kwargs', {}) - return serve_kwargs.get(server_name, {}) - - @classmethod - def has_serve_kwargs(cls, server_name:str): - serve_kwargs = c.get('serve_kwargs', {}) - return server_name in serve_kwargs - @classmethod def serve(cls, module:Any = None , + kwargs:dict = None, # kwargs for the module tag:str=None, - ip :str = None, - network = 'local', + server_network = 'local', port :int = None, # name of the server if None, it will be the module name server_name:str=None, # name of the server if None, it will be the module name - kwargs:dict = None, # kwargs for the module + name = None, # name of the server if None, it will be the module name refresh:bool = True, # refreshes the server's key - wait_for_server:bool = True , # waits for the server to start before returning remote:bool = True, # runs the server remotely (pm2, ray) - server_mode:str = server_mode, tag_seperator:str='::', - update:bool = False, + max_workers:int = None, + free: bool = False, + mnemonic = None, # mnemonic for the server + key = None, **extra_kwargs ): - - if update: - c.update() - - - kwargs = kwargs or {} - kwargs = {**kwargs, **extra_kwargs} - extra_kwargs = {} - + if c.is_module(module): + cls = module if module == None: module = cls.module_path() - if tag_seperator in module: - module, tag = module.split(tag_seperator) - - module_class = cls.resolve_module(module) - - server_name = cls.resolve_server_name(module=module, name=server_name, tag=tag, tag_seperator=tag_seperator) - if tag_seperator in server_name: - tag = server_name.split(tag_seperator)[-1] + kwargs = kwargs or {} + kwargs.update(extra_kwargs or {}) + name = server_name or name # name of the server if None, it will be the module name + name = cls.resolve_server_name(module=module, name=name, tag=tag, tag_seperator=tag_seperator) + if tag_seperator in name: + module, tag = name.split(tag_seperator) + # RESOLVE THE PORT FROM THE ADDRESS IF IT ALREADY EXISTS + if port == None: + # now if we have the server_name, we can repeat the server + address = c.get_address(name, network=server_network) + port = int(address.split(':')[-1]) if address else c.free_port() + # NOTE REMOVE THIS FROM THE KWARGS REMOTE if remote: - remote_kwargs = cls.locals2kwargs(locals(), merge_kwargs=False) - remote_kwargs.pop('extra_kwargs') - remote_kwargs['remote'] = False - remote_kwargs.pop('module_class') # remove module_class from the kwargs - c.save_serve_kwargs(server_name, remote_kwargs) - cls.remote_fn('serve',name=server_name, kwargs=remote_kwargs) - if wait_for_server: - cls.wait_for_server(server_name, network=network) - return server_name - - module_class = cls.resolve_module(module) - kwargs.update(extra_kwargs) - # this automatically adds + remote_kwargs = c.locals2kwargs(locals()) # GET THE LOCAL KWARGS FOR SENDING TO THE REMOTE + remote_kwargs['remote'] = False # SET THIS TO FALSE TO AVOID RECURSION + # REMOVE THE LOCALS FROM THE REMOTE KWARGS THAT ARE NOT NEEDED + for _ in ['extra_kwargs', 'address']: + remote_kwargs.pop(_, None) # WE INTRODUCED THE ADDRES + cls.remote_fn('serve',name=name, kwargs=remote_kwargs) + address = c.ip() + ':' + str(remote_kwargs['port']) + return {'success':True, + 'name': name, + 'address':address, + 'kwargs':kwargs + } + module_class = c.module(module) + kwargs.update(extra_kwargs) + if mnemonic != None: + c.add_key(server_name, mnemonic) + self = module_class(**kwargs) + self.server_name = name self.tag = tag - self.server_name = server_name - - if 
c.server_exists(server_name, network=network) and server_name in c.pm2_list(): - if refresh: - c.print(f'Stopping existing server {server_name}', color='yellow') - address = c.get_address(server_name, network=network) - ip, port = address[0], int(address[1]) - c.kill(server_name) - c.deregister_server(server_name, network=network) - else: - return server_name - if port == None: - port = c.free_port() - - server = c.module(f'server.{server_mode}')(module=self, name= server_name, port=int(port), network=network) - return server.name + address = c.get_address(name, network=server_network) + if address != None and ':' in address: + port = address.split(':')[-1] + + if c.server_exists(server_name, network=server_network) and not refresh: + return {'success':True, 'message':f'Server {server_name} already exists'} + c.module(f'server')(module=self, + name=name, + port=port, + network=server_network, + max_workers=max_workers, + free=free, + key=key) + + return {'success':True, + 'address': f'{c.default_ip}:{port}' , + 'name':name, + 'kwargs': kwargs, + 'module':module} serve_module = serve @classmethod - def functions(cls, search: str=None , include_parents:bool = False): - functions = cls.get_functions(include_parents=include_parents) - functions = list(set(functions)) - if isinstance(search, str): - functions = [f for f in functions if search in f] + def functions(cls, search: str=None , include_parents:bool = False, module=None): + if module != None: + cls = c.module(module) + functions = cls.get_functions(include_parents=include_parents, search=search) return functions fns = functions + + def hasfn(self, fn:str): + return hasattr(self, fn) and callable(getattr(self, fn)) @classmethod - def get_function_signature_map(cls, obj=None, include_parents:bool = False): + def fn_signature_map(cls, obj=None, include_parents:bool = False): function_signature_map = {} if isinstance(obj, str): obj = c.module(obj) @@ -2535,37 +2589,10 @@ def get_function_signature_map(cls, obj=None, include_parents:bool = False): return function_signature_map - @property - def function_signature_map(self, include_parents:bool = False): - return self.get_function_signature_map(obj=self, include_parents=include_parents) - - @property - def function_default_map(self, include_parents=False): - return self.get_function_default_map(obj=self, include_parents=False) - - @classmethod - def get_function_default_map(cls, obj:Any= None, include_parents=False) -> Dict[str, Dict[str, Any]]: - obj = obj if obj else cls - default_value_map = {} - function_signature = cls.get_function_signature_map(obj=obj,include_parents=include_parents) - for fn_name, fn in function_signature.items(): - default_value_map[fn_name] = {} - if fn_name in ['self', 'cls']: - continue - for var_name, var in fn.items(): - if len(var.split('=')) == 1: - var_type = var - default_value_map[fn_name][var_name] = 'NA' - - elif len(var.split('=')) == 2: - var_value = var.split('=')[-1].strip() - default_value_map[fn_name][var_name] = eval(var_value) - - return default_value_map + function_signature_map = fn_signature_map - def is_fn_allowed(self, fn_name:str) -> bool: whitelist = self.whitelist blacklist = self.blacklist @@ -2582,101 +2609,144 @@ def set_server_name(self, name:str, **kwargs): c.register_server(name, self.address, **kwargs) return {'success':True, 'message':f'Server name set to {name}'} - - + @classmethod + def dummy_gen(cls): + for i in range(10): + c.print(i) + yield i def info(self , - schema: bool = False, - namespace:bool = False, - peers: bool = 
False) -> Dict[str, Any]: - fns = [fn for fn in self.fns() if self.is_fn_allowed(fn)] - attributes =[ attr for attr in self.attributes() if self.is_fn_allowed(attr)] - info = dict( - address = self.address.replace(c.default_ip, c.ip(update=False)), - functions = fns, # get the functions of the module - attributes = attributes, # get the attributes of the module - name = self.server_name() if callable(self.server_name) else self.server_name, # get the name of the module - path = self.module_path(), # get the path of the module - chash = self.chash(), # get the hash of the module (code) + module = None, + features = ['schema', 'namespace', 'commit_hash', 'hardware','attributes','functions'], + lite_features = ['name', 'address', 'schema', 'ss58_address', 'description'], + lite = True, + cost = False, + **kwargs + ) -> Dict[str, Any]: + ''' + Return an info dict for this module, filtered by the requested features. + ''' + if lite: + features = lite_features + + if module != None: + if isinstance(module, str): + module = c.module(module)() + self = module + + info = {} + + if 'schema' in features: + info['schema'] = self.schema(defaults=True, include_parents=True) + info['schema'] = {k: v for k,v in info['schema'].items() if k in self.whitelist} + if 'namespace' in features: + info['namespace'] = c.namespace(network='local') + if 'hardware' in features: + info['hardware'] = c.hardware() + if 'attributes' in features: + info['attributes'] = attributes =[ attr for attr in self.attributes()] + if 'functions' in features: + info['functions'] = [fn for fn in self.whitelist] + if 'name' in features: + info['name'] = self.server_name() if callable(self.server_name) else self.server_name # get the name of the module + if 'path' in features: + info['path'] = self.module_path() # get the path of the module + if 'address' in features: + info['address'] = self.address.replace(c.default_ip, c.ip(update=False)) + if 'ss58_address' in features: + info['ss58_address'] = self.key.ss58_address + if 'code_hash' in features: + info['code_hash'] = self.chash() # get the hash of the module (code) + if 'commit_hash' in features: + info['commit_hash'] = c.commit_hash() + if 'description' in features: + info['description'] = self.description + + c.put_json('info', info) + if cost: + if hasattr(self, 'cost'): + info['cost'] = self.cost + return info + + help = info - ) - info['hash'] = c.hash(info) + + @classmethod + def hardware(cls, fmt:str = 'gb', **kwargs): + return c.module('os').hardware(fmt=fmt, **kwargs) - if hasattr(self, 'key'): - auth = self.key.sign(info, return_json=True) - info['signature'] = auth['signature'] - info['ss58_address'] = auth['address'] - if schema: - schema = self.schema(defaults=True) - info['schema'] = {fn: schema[fn] for fn in fns} + @classmethod + def init_schema(cls): + return cls.fn_schema('__init__') - return info + @classmethod + def init_kwargs(cls): + kwargs = cls.fn_defaults('__init__') + kwargs.pop('self', None) + if 'config' in kwargs: + if kwargs['config'] != None: + kwargs.update(kwargs.pop('config')) + del kwargs['config'] + if 'kwargs' in kwargs: + if kwargs['kwargs'] != None: + kwargs = kwargs.pop('kwargs') + del kwargs['kwargs'] + + return kwargs - help = info @classmethod - def schema(cls,search: str = None, - code : bool = False, - docs: bool = False, - include_parents:bool = False, - defaults:bool = False,) -> 'Schema': + def schema(cls, + search = None, + module = None, + fn = None, + docs: bool = True, + include_parents:bool = False, + defaults:bool = True, cache=False) -> 'Schema': + + if '/' in 
str(search): + module, fn = search.split('/') + cls = c.module(module) + if isinstance(module, str): + if '/' in module: + module , fn = module.split('/') + module = c.module(module) - kwargs = c.locals2kwargs(locals()) - return {k: v for k,v in cls.get_schema(**kwargs).items()} - @classmethod - def get_schema(cls, - obj = None, - search = None, - code : bool = False, - docs: bool = False, - include_parents:bool = False, - defaults:bool = False,): - - obj = obj if obj else cls + module = module or cls + schema = {} + fns = module.get_functions(include_parents=include_parents) + for fn in fns: + if search != None and search not in fn: + continue + if callable(getattr(module, fn )): + schema[fn] = cls.fn_schema(fn, defaults=defaults,docs=docs) + + return c.copy(schema) - if isinstance(obj, str): - obj = c.module(obj) - - function_schema_map = {} - for fn in cls.get_functions(obj, include_parents=include_parents): - - if search != None : - if search not in fn: - continue - fn_obj = getattr(obj, fn ) - if callable(fn_obj): - function_schema_map[fn] = cls.fn_schema(fn, defaults=defaults, code=code, docs=docs) - - return function_schema_map @classmethod def get_function_annotations(cls, fn): fn = cls.get_fn(fn) + if not hasattr(fn, '__annotations__'): + return {} return fn.__annotations__ @classmethod def fn_schema(cls, fn:str, - defaults:bool=False, + defaults:bool=True, code:bool = False, - docs:bool = False)->dict: + docs:bool = True, + version=2)->dict: ''' Get function schema of function in cls ''' - import inspect fn_schema = {} - if isinstance(fn, str): - fn = getattr(cls, fn) - fn_args = cls.get_function_args(fn) + fn = cls.get_fn(fn) fn_schema['input'] = cls.get_function_annotations(fn=fn) - if defaults: - fn_schema['default'] = cls.get_function_defaults(fn=fn) - for k,v in fn_schema['default'].items(): - if k not in fn_schema['input'] and v != None: - fn_schema['input'][k] = type(v).__name__ if v != None else None - + for k,v in fn_schema['input'].items(): v = str(v) @@ -2704,6 +2774,21 @@ def fn_schema(cls, fn:str, fn_schema['input'].pop(arg) if 'default' in fn_schema: fn_schema['default'].pop(arg, None) + + + if defaults: + fn_schema['default'] = cls.fn_defaults(fn=fn) + for k,v in fn_schema['default'].items(): + if k not in fn_schema['input'] and v != None: + fn_schema['input'][k] = type(v).__name__ if v != None else None + + if version == 1: + pass + elif version == 2: + defaults = fn_schema.pop('default', {}) + fn_schema['input'] = {k: {'type':v, 'default':defaults.get(k)} for k,v in fn_schema['input'].items()} + else: + raise Exception(f'Version {version} not implemented') return fn_schema @@ -2714,7 +2799,8 @@ def get_annotations(fn:callable) -> dict: return fn.__annotations__ @classmethod - def kill(cls, module, + def kill(cls, + module, mode:str = 'pm2', verbose:bool = False, update : bool = True, @@ -2745,10 +2831,10 @@ def kill(cls, module, for m in delete_modules: if m in servers: c.deregister_server(m) - return {'server_killed': delete_modules, 'update': update} + @classmethod def kill_prefix(cls, prefix:str, **kwargs): servers = c.servers(network='local') @@ -2758,12 +2844,44 @@ def kill_prefix(cls, prefix:str, **kwargs): c.kill(s, **kwargs) killed_servers.append(s) return {'success':True, 'message':f'Killed servers with prefix {prefix}'} + killpre = kill_prefix + + + + @classmethod + def kill_many(cls, search:str, network='local', parallel=True, timeout=10, n=None, **kwargs): + servers = c.servers(network=network) + servers = [s for s in servers if search in s] + + + n = 
n or len(servers) + + if n > 0 and n < 1: + servers = servers[:int(len(servers)*n)] + elif n > 1: + servers = servers[:n] + + assert len(servers) > 0, f'No servers found with search {search}' + if parallel: + futures = [] + for s in servers: + future = c.submit(c.kill, kwargs={'module':s, **kwargs}, mode='thread', return_future = True, timeout=timeout) + futures.append(future) + + results = c.wait(futures, timeout=timeout) + else: + results = [] + for s in servers: + results.append(c.kill(s, **kwargs)) + + + return {'success':True, 'message':f'Killed servers with prefix {search}', 'results': results} delete = kill_server = kill def destroy(self): - self.kill(self.server_name) - return path + return self.kill(self.server_name) + def self_destruct(self): c.kill(self.server_name) @@ -2771,32 +2889,14 @@ def self_destruct(self): def self_restart(self): c.restart(self.server_name) - @classmethod - def set_shortcut(cls, shortcut: str, kwargs: dict) -> dict: - self.shortcuts = self.get_shortcuts() - # remove shortcut if it exists - kwargs.pop('shortcut', None) - cls.shortcuts[shortcut] = kwargs - self.put_json('shortcuts', cls.shortcuts) - - return kwargs - - @classmethod - def get_shortcut(cls, shortcut:str) -> dict: - self.shortcuts = cls.get_shortcuts() - kwargs = cls.shortcuts.get(shortcut, None) - return kwargs - - def get_shortcuts(cls) -> dict: - return cls.get_json('shortcuts') @classmethod - def has_shortcut(cls, shortcut:str): - return cls.get_shortcut(shortcut) != None - + def get_shortcut(cls, shortcut:str) -> dict: + return cls.shortcuts().get(shortcut) + @classmethod def rm_shortcut(cls, shortcut) -> str: - shortcuts = cls.get_shortcuts() + shortcuts = cls.shortcuts() if shortcut in shortcuts: cls.shortcuts.pop(shortcut) cls.put_json('shortcuts', cls.shortcuts) @@ -2813,270 +2913,63 @@ def launch(cls, mode:str = 'pm2', tag:str=None, tag_seperator: str = '::', - verbose : bool = True, - device:str = None, + verbose : bool = False, update: bool = False, - **extra_kwargs): + **extra_launch_kwargs): ''' Launch a module as pm2 or ray ''' if update: cls.update() - kwargs = kwargs if kwargs else {} - kwargs.update(extra_kwargs) - args = args if args else [] - if module == None: - module = cls - elif isinstance(module, str): - module = cls.get_module(module) + kwargs = kwargs or {} + args = args or [] + + # if module is not specified, use the current module + module = module or cls + if isinstance(module, str): + module = c.module(module) + # resolve the name if name == None: + # if the module has a module_path function, use that as the name if hasattr(module, 'module_path'): name = module.module_path() else: - name = module.__name__.lower() - - if tag != None: - name = f'{name}{tag_seperator}{tag}' - - - if verbose: - c.print(f'[bold cyan]Launching[/bold cyan] [bold yellow]class:{module.__name__}[/bold yellow] [bold white]name[/bold white]:{name} [bold white]fn[/bold white]:{fn} [bold white]mode[/bold white]:{mode}', color='green') - - if mode == 'local': - return getattr(module, fn)(*args, **kwargs) - - elif mode == 'pm2': - - launch_kwargs = dict( - module=module, - fn = fn, - name=name, - tag=tag, - args = args, - kwargs = kwargs, - refresh=refresh, - device= device, - **extra_kwargs - ) - - - assert fn != None, 'fn must be specified for pm2 launch' - stdout = getattr(cls, f'{mode}_launch')(**launch_kwargs) - - - elif mode == 'ray': - launch_kwargs = dict( - module=module, - name=name, - tag=tag, - args = args, - kwargs = kwargs, - refresh=refresh, - **extra_kwargs - ) - - getattr(cls, 
f'{mode}_launch')(**launch_kwargs) - else: - raise Exception(f'launch mode {mode} not supported') - - return name - - - @classmethod - def pm2_kill_all(cls, verbose:bool = True): - for module in cls.pm2_list(): - cls.pm2_kill(module, verbose=verbose) - - @classmethod - def pm2_list(cls, search=None, verbose:bool = False) -> List[str]: - output_string = cls.run_command('pm2 status', verbose=False) - module_list = [] - for line in output_string.split('\n'): - if '│ default │ ' in line: - server_name = line.split('│')[2].strip() - # fixes odd issue where there is a space between the name and the front - server_name = server_name.split(' ')[-1] - module_list += [server_name] - + name = module.__name__.lower() + # resolve the tag + if tag != None: + name = f'{name}{tag_seperator}{tag}' + + c.print(f'[bold cyan]Launching[/bold cyan] [bold yellow]class:{module.__name__}[/bold yellow] [bold white]name[/bold white]:{name} [bold white]fn[/bold white]:{fn} [bold white]mode[/bold white]:{mode}', color='green', verbose=verbose) + + launch_kwargs = dict( + module=module, + fn = fn, + name=name, + tag=tag, + args = args, + kwargs = kwargs, + refresh=refresh, + **extra_launch_kwargs + ) - if search: - if isinstance(search, str): - search = [search] - elif isinstance(search, list): - pass - assert all([isinstance(s, str) for s in search]), 'search must be a list of strings' - - search_true = lambda x: any([s in x for s in search]) - module_list = [m for m in module_list if search_true(m)] - - return module_list - - lspm2 = ls_pm2 = pm2ls = pm2_ls = pm2list = pm2_list - # commune.run_command('pm2 status').stdout.split('\n')[5].split(' │')[0].split(' │ ')[-1]commune.run_command('pm2 status').stdout.split('\n')[5].split(' │')[0].split(' │ ')[-1] + assert fn != None, 'fn must be specified for pm2 launch' + return getattr(cls, f'{mode}_launch')(**launch_kwargs) @classmethod - def pm2_exists(cls, name:str): - return name in cls.pm2_list() - - @staticmethod - def pm2_start(path:str , - name:str, - cmd_kwargs:str = None, - refresh: bool = True, - verbose:bool = True, - force : bool = True, - interpreter : str = None, - **kwargs): - if c.pm2_exists(name) and refresh: - c.pm2_kill(name, verbose=verbose) - - cmd = f'pm2 start {path} --name {name}' - if force: - cmd += ' -f' - - if interpreter != None: - cmd += f' --interpreter {interpreter}' - - if cmd_kwargs != None: - cmd += f' -- ' - if isinstance(cmd_kwargs, dict): - for k, v in cmd_kwargs.items(): - cmd += f'--{k} {v}' - elif isinstance(cmd_kwargs, str): - cmd += f'{cmd_kwargs}' - - - c.print(f'[bold cyan]Starting (PM2)[/bold cyan] [bold yellow]{name}[/bold yellow]', color='green') - - return c.cmd(cmd, verbose=verbose,**kwargs) - - @classmethod - - def pm2_launch(cls, - module:str = None, - fn: str = 'serve', - name:Optional[str]=None, - tag : str = None, - args : list = None, - kwargs: dict = None, - device:str=None, - interpreter:str='python3', - no_autorestart: bool = False, - verbose: bool = False , - force:bool = True, - meta_fn: str = 'module_fn', - tag_seperator:str = '::', - refresh:bool=True ): - - if module == None: - module = cls.module_path() - elif hasattr(module, 'module_path'): - module = module.module_path() - - # avoid these references fucking shit up - args = args if args else [] - kwargs = kwargs if kwargs else {} - - # convert args and kwargs to json strings - kwargs = { - 'module': module, - 'fn': fn, - 'args': args, - 'kwargs': kwargs - - } - kwargs_str = json.dumps(kwargs).replace('"', "'") - name = c.resolve_server_name(module=module, 
name=name, tag=tag, tag_seperator=tag_seperator) - # build command to run pm2 - command = f" pm2 start {c.module_file()} --name {name} --interpreter {interpreter}" - if no_autorestart: - command = command + ' ' + '--no-autorestart' - if force: - command += ' -f ' - command = command + '' - - command = command + f' -- --fn {meta_fn} --kwargs "{kwargs_str}"' - env = {} - if device != None: - if isinstance(device, int): - env['CUDA_VISIBLE_DEVICES']=str(device) - if isinstance(device, list): - env['CUDA_VISIBLE_DEVICES']=','.join(list(map(str, device))) - - if refresh: - cls.pm2_kill(name) - - if verbose: - c.print(f'Launching {module} with command: {command}', color='green') - - stdout = c.cmd(command, env=env, verbose=verbose) - - return stdout - - @classmethod - def register(cls, - module = None, - tag:str = None, - key : str = None, - stake : int = 0, - subnet:str = 'commune', - refresh:bool =False, - fmt : str = 'j', - **kwargs ): - subspace = c.module('subspace')() - - # resolve module name and tag if they are in the server_name - if isinstance(module, str) and '::' in module: - module, tag = module.split('::') - server_name = cls.resolve_server_name(module=module, tag=tag) - # if not subspace.is_unique_name(server_name, netuid=subnet): - # return {'success': False, 'msg': f'Server name {server_name} already exists in subnet {subnet}'} - if not c.key_exists(server_name): - c.add_key(server_name) - if c.server_exists(server_name, network='local') and refresh == False: - c.print(f'Server already Exists ({server_name})') - - else: - module = cls.resolve_module(module) - server_name = module.serve( - server_name=server_name, - wait_for_server=False, - refresh=refresh, - tag=tag, - **kwargs) - subspace.register(name=server_name, subnet=subnet, key=key, stake=stake) - return {'success':True, 'message':f'Server {server_name} registered to {subnet}', 'server_name': server_name } + def register(cls, *args, **kwargs ): + return c.module('subspace')().register(*args, **kwargs) @classmethod def key_stats(cls, *args, **kwargs): return c.module('subspace')().key_stats(*args, **kwargs) - - - r = reg = register - @classmethod - def pm2_kill(cls, name:str, verbose:bool = False, prefix_match:bool = True): - pm2_list = cls.pm2_list() - if name in pm2_list: - rm_list = [name] - else: - if prefix_match: - rm_list = [ p for p in pm2_list if p.startswith(name)] - else: - raise Exception(f'pm2 process {name} not found') - if len(rm_list) == 0: - if verbose: - c.print(f'ERROR: No pm2 processes found for {name}', color='red') - return [] - for n in rm_list: - if verbose: - c.print(f'Killing {n}', color='red') - cls.cmd(f"pm2 delete {n}", verbose=False) - cls.pm2_rm_logs(n) - return rm_list - + + @classmethod + def key2stats(cls, *args, **kwargs): + return c.module('subspace')().key_stats(*args, **kwargs) + @staticmethod def detailed_error(e) -> dict: import traceback @@ -3085,6 +2978,7 @@ def detailed_error(e) -> dict: line_no = tb[-1].lineno line_text = tb[-1].line response = { + 'success': False, 'error': str(e), 'file_name': file_name, 'line_no': line_no, @@ -3092,154 +2986,151 @@ def detailed_error(e) -> dict: } return response + @classmethod + def pm2_kill_many(cls, search=None, verbose:bool = True, timeout=10): + return c.module('pm2').kill_many(search=search, verbose=verbose, timeout=timeout) + + @classmethod + def pm2_kill_all(cls, verbose:bool = True, timeout=10): + return cls.pm2_kill_many(search=None, verbose=verbose, timeout=timeout) + + @classmethod + def pm2_servers(cls, search=None, verbose:bool = 
False) -> List[str]: + return c.module('pm2').servers(verbose=verbose) + pm2ls = pm2_list = pm2_servers + # commune.run_command('pm2 status').stdout.split('\n')[5].split(' │')[0].split(' │ ')[-1]commune.run_command('pm2 status').stdout.split('\n')[5].split(' │')[0].split(' │ ')[-1] + + @classmethod + def pm2_exists(cls, name:str) -> bool: + return c.module('pm2').exists(name=name) + + @classmethod + def pm2_start(cls, *args, **kwargs): + return c.module('pm2').start(*args, **kwargs) + + @classmethod + def pm2_launch(cls, *args, **kwargs): + return c.module('pm2').launch(*args, **kwargs) + @classmethod def pm2_restart(cls, name:str, verbose:bool = False, prefix_match:bool = True): - pm2_list = cls.pm2_list() - if name in pm2_list: - rm_list = [name] - else: - if prefix_match: - rm_list = [ p for p in pm2_list if p.startswith(name)] - else: - raise Exception(f'pm2 process {name} not found') - - if len(rm_list) == 0: - if verbose: - c.print(f'ERROR: No pm2 processes found for {name}', color='red') - return [] - for n in rm_list: - c.print(f'Restarting {n}', color='cyan') - cls.cmd(f"pm2 restart {n}", verbose=False) - cls.pm2_rm_logs(n) - return rm_list - + return c.module('pm2').restart(name=name, verbose=verbose, prefix_match=prefix_match) @classmethod def pm2_restart_prefix(cls, name:str = None, verbose:bool=False): - pm2_list = cls.pm2_list() - - restarted_modules = [] - - for module in pm2_list: - if module.startswith(name) or name in ['all']: - if verbose: - c.print(f'Restarting {module}', color='cyan') - c.cmd(f"pm2 restart {module}", verbose=verbose) - restarted_modules.append(module) - - return restarted_modules - + return c.module('pm2').restart_prefix(name=name, verbose=verbose) + + @classmethod + def pm2_kill(cls, name:str, verbose:bool = False, prefix_match:bool = True): + return c.module('pm2').kill(name=name, verbose=verbose, prefix_match=prefix_match) @classmethod - def restart(cls, name:str, mode:str='server', verbose:bool = False, prefix_match:bool = True): + def restart(cls, name:str, mode:str='pm2', verbose:bool = False, prefix_match:bool = True): refreshed_modules = getattr(cls, f'{mode}_restart')(name, verbose=verbose, prefix_match=prefix_match) return refreshed_modules def restart_self(self): - c.restart_server(self.server_name) + """ + Helper function to restart the server + """ + return c.restart(self.server_name) + + update_self = restart_self def kill_self(self): - c.kill(self.server_name) + """ + Helper function to kill the server + """ + return c.kill(self.server_name) refresh = reset = restart @classmethod def pm2_status(cls, verbose=True): - stdout = cls.run_command(f"pm2 status") - if verbose: - c.print(stdout,color='green') - return stdout + return c.module('pm2').status(verbose=verbose) - pm2_dir = os.path.expanduser('~/.pm2') @classmethod def pm2_logs_path_map(cls, name=None): - pm2_logs_path_map = {} - for l in c.ls(f'{cls.pm2_dir}/logs/'): - key = '-'.join(l.split('/')[-1].split('-')[:-1]).replace('-',':') - pm2_logs_path_map[key] = pm2_logs_path_map.get(key, []) + [l] - - - for k in pm2_logs_path_map.keys(): - pm2_logs_path_map[k] = {l.split('-')[-1].split('.')[0]: l for l in list(pm2_logs_path_map[k])} - - if name != None: - return pm2_logs_path_map.get(name, {}) - - return pm2_logs_path_map - + return c.module('pm2').logs_path_map(name=name) @classmethod def pm2_rm_logs( cls, name): - pm2_logs_map = cls.pm2_logs_path_map(name) - - for k in pm2_logs_map.keys(): - c.rm(pm2_logs_map[k]) + return c.module('pm2').rm_logs(name=name) @classmethod def 
pm2_logs(cls, module:str, - tail: int =100, + tail: int =20, verbose: bool=True , - mode: str ='cmd'): - - - if mode == 'local': - text = '' - for m in ['out','error']: - - # I know, this is fucked - path = f'{cls.pm2_dir}/logs/{module.replace("/", "-")}-{m}.log'.replace(':', '-').replace('_', '-') - try: - text += c.get_text(path, tail=tail) - except Exception as e: - c.print(e) - continue - - return text - elif mode == 'cmd': - return cls.run_command(f"pm2 logs {module}", verbose=verbose) - else: - raise NotImplementedError(f'mode {mode} not implemented') + mode: str ='cmd', + **kwargs): + return c.module('pm2').logs(module=module, + tail=tail, + verbose=verbose, + mode=mode, + **kwargs) + + @staticmethod def memory_usage(fmt='gb'): fmt2scale = {'b': 1e0, 'kb': 1e1, 'mb': 1e3, 'gb': 1e6} - import os, psutil + import psutil process = psutil.Process() scale = fmt2scale.get(fmt) return (process.memory_info().rss // 1024) / scale @classmethod - def argparse(cls, verbose: bool = False): - import argparse - parser = argparse.ArgumentParser(description='Gradio API and Functions') - parser.add_argument('-fn', '--fn', dest='function', help='run a function from the module', type=str, default="__init__") - parser.add_argument('-kwargs', '--kwargs', dest='kwargs', help='key word arguments to the function', type=str, default="{}") - parser.add_argument('-args', '--args', dest='args', help='arguments to the function', type=str, default="[]") - args = parser.parse_args() - if verbose: - c.print('Argparse Args: ',args, color='cyan') - args.kwargs = json.loads(args.kwargs.replace("'",'"')) - args.args = json.loads(args.args.replace("'",'"')) + def argparse(cls, verbose: bool = False, version=1): + if version == 1: + parser = argparse.ArgumentParser(description='Argparse for the module') + parser.add_argument('-fn', '--fn', dest='function', help='The function of the key', type=str, default="__init__") + parser.add_argument('-kwargs', '--kwargs', dest='kwargs', help='key word arguments to the function', type=str, default="{}") + parser.add_argument('-p', '-params', '--params', dest='params', help='key word arguments to the function', type=str, default="{}") + parser.add_argument('-i','-input', '--input', dest='input', help='key word arguments to the function', type=str, default="{}") + parser.add_argument('-args', '--args', dest='args', help='arguments to the function', type=str, default="[]") + args = parser.parse_args() + if verbose: + c.print('Argparse Args: ',args, color='cyan') + args.kwargs = json.loads(args.kwargs.replace("'",'"')) + args.params = json.loads(args.params.replace("'",'"')) + args.inputs = json.loads(args.input.replace("'",'"')) + + # if you pass in the params, it will override the kwargs + if len(args.params) > len(args.kwargs): + args.kwargs = args.params + args.args = json.loads(args.args.replace("'",'"')) + elif version == 2: + args = c.parseargs() + return args @classmethod - def run(cls, name:str = None, verbose:bool = False) -> Any: - if name == '__main__' or name == None or name == cls.__name__: - args = cls.argparse() - if args.function == '__init__': - return cls(*args.args, **args.kwargs) + def run(cls, name:str = None, verbose:bool = False, version=1) -> Any: + is_main = name == '__main__' or name == None or name == cls.__name__ + if not is_main: + return {'success':False, 'message':f'Not main module {name}'} + args = cls.argparse(version=version) + + if args.function == '__init__': + return cls(*args.args, **args.kwargs) + else: + fn = getattr(cls, args.function) + fn_type 
= cls.classify_fn(fn) + + if fn_type == 'self': + module = cls(*args.args, **args.kwargs) else: - return getattr(cls, args.function)(*args.args, **args.kwargs) + module = cls - - - - @classmethod - def api(cls, *args, **kwargs): - from commune.api import API - return API(*args, **kwargs) + return getattr(module, args.function)(*args.args, **args.kwargs) @classmethod def learn(cls, *args, **kwargs): return c.module('model.hf').learn(*args, **kwargs) - + + @classmethod + def commit_hash(cls, libpath:str = None): + if libpath == None: + libpath = c.libpath + return c.cmd('git rev-parse HEAD', cwd=libpath, verbose=False).split('\n')[0].strip() + @classmethod def get_methods(cls, obj:type= None, modes:Union[str, List[str]] = 'all', ) -> List[str]: @@ -3264,99 +3155,15 @@ def get_methods(cls, obj:type= None, modes:Union[str, List[str]] = 'all', ) -> assert mode in default_modes, f'{mode} not in {default_modes}' methods.extend(getattr(cls, f'get_{mode}_methods')(obj)) - - - @classmethod - def get_self_methods(cls, obj=None) -> List[str]: - from commune.utils.function import get_self_methods - return get_self_methods(obj if obj else cls) - - ## RAY LAND - @classmethod - def ray_stop(cls): - cls.run_command('ray stop') - - @classmethod - def ray_import(cls): - import ray - return ray - @classmethod - def ray_start(cls): - ''' - Start the ray cluster - (TODO: currently supports head) - ''' - return cls.run_command('ray start --head') - - @classmethod - def ray_restart(cls, stop:dict={}, start:dict={}): - ''' - - Restart ray cluster - - ''' - command_out_dict = {} - command_out_dict['stop'] = cls.ray_stop(**stop) - command_out_dict['start'] = cls.ray_start(**start) - return command_out_dict - - - default_ray_env = {'address':'auto', - 'namespace': 'default', - 'ignore_reinit_error': False, - 'dashboard_host': '0.0.0.0', - '_system_config': { - "object_spilling_config": json.dumps( - {"type": "filesystem", "params": {"directory_path": "/tmp/spill"}}, - ) - } - - } - - @classmethod - def ray_init(cls,init_kwargs={}): - import ray - - init_kwargs = {**cls.default_ray_env, **init_kwargs} - if cls.ray_initialized(): - # shutdown if namespace is different - if cls.ray_namespace() == cls.default_ray_env['namespace']: - return cls.ray_runtime_context() - else: - ray.shutdown() - - ray_context = ray.init(**init_kwargs) - return ray_context - - @classmethod - def ray_runtime_context(cls): - return ray.get_runtime_context() - - - @classmethod - def ray_stop(cls): - return cls.run_command('ray stop') - @classmethod - def ray_start(cls): - return cls.run_command('ray start --head') - - - @classmethod - def ray_status(cls, *args, **kwargs): - return cls.run_command('ray status', *args, **kwargs) + def transfer_multiple(cls, *args, **kwargs): + return c.module('subspace')().transfer_multiple(*args, **kwargs) + @classmethod - def ray_initialized(cls): - import ray - return ray.is_initialized() + def transfer_stake(cls, *args, **kwargs): + return c.module('subspace')().transfer_stake(*args, **kwargs) - # def resource_usage(self): - # resource_dict = self.config.get('actor', {}).get('resources', None) - # resource_dict = {k.replace('num_', ''):v for k,v in resource_dict.items()} - # resource_dict['memory'] = self.memory_usage(mode='ratio') - # return resource_dict - @classmethod def transfer_fn_code(cls, module1= 'module', @@ -3382,313 +3189,23 @@ def transfer_fn_code(cls, module1= 'module', module2_code += '\n' module2_code += '\n'.join([ ' ' + line for line in fn_code.split('\n')]) module2_code += '\n' - 
c.print('module2_code', module2_code) c.put_text(filepath, module2_code) return {'success': True, 'module2_code': module2_code, 'module2_fns': module2_fns, 'module1_fn_code_map': module1_fn_code_map} - @classmethod - def ensure_ray_context(cls, ray_config:dict = None): - ray_config = ray_config if ray_config != None else {} - - if cls.ray_initialized(): - ray_context = cls.get_ray_context() - else: - ray_context = cls.ray_init(init_kwargs=ray_config) - - return ray_context - @classmethod - def ray_env(cls): - import ray - if not cls.ray_initialized(): - cls.ray_init() - return ray - + @classmethod def get_server_name(cls, name:str=None, tag:str=None, seperator:str='.'): name = name if name else cls.__name__.lower() if tag != None: name = tag + seperator + name return name - @classmethod - def ray_launch(cls, - module= None, - name:Optional[str]=None, - tag:str=None, - args:List = None, - refresh:bool = False, - kwargs:Dict = None, - serve: bool = False, - **actor_kwargs): - - launch_kwargs = dict(locals()) - launch_kwargs.update(launch_kwargs.pop('actor_kwargs')) - launch_kwargs = deepcopy(launch_kwargs) - ray = cls.ray_env() - """ - deploys process as an actor or as a class given the config (config) - """ - args = args if args != None else [] - kwargs = kwargs if kwargs != None else {} - module_class = None - if isinstance(module, str): - module_class = cls.get_module(module) - elif module == None : - module_class = cls - - else: - module_class = c.module(module) - - assert isinstance(name, str) - - actor_kwargs['name'] = name - actor_kwargs['refresh'] = refresh - - actor = cls.create_actor(module=module_class, args=args, kwargs=kwargs, **actor_kwargs) - if serve: - actor = actor.serve(ray_get=False) - - return actor - - - default_ray_env = {'address':'auto', - 'namespace': 'default', - 'ignore_reinit_error': False, - 'dashboard_host': '0.0.0.0'} - @classmethod - def ray_init(cls,init_kwargs={}): - import ray - init_kwargs = {**cls.default_ray_env, **init_kwargs} - ray_context = {} - if cls.ray_initialized(): - ray_context = cls.ray_runtime_context() - else: - ray_context = ray.init(**init_kwargs) - - return ray_context - - @classmethod - def create_actor(cls, - module : str = None, - name:str = None, - tag:str = None, - kwargs: dict = None, - args:list =None, - cpus:int = 1.0, - gpus:int = 0, - detached:bool=True, - max_concurrency:int=50, - refresh:bool=True, - verbose:bool= True, - virtual:bool = True): - - # self.ray_init() - import ray, torch - module = module if module != None else cls - - cls_kwargs = kwargs if kwargs else {} - cls_args = args if args else [] - name = name if name != None else module.__name__ - resources = {} - resources['num_cpus'] = cpus - resources['num_gpus'] = gpus - - if not torch.cuda.is_available() and 'num_gpus' in resources: - del resources['num_gpus'] - - # configure the option_kwargs - options_kwargs = {'name': name, - 'max_concurrency': max_concurrency, - **resources} - - # detatch the actor from the process when it finishes - if detached: - options_kwargs['lifetime'] = 'detached' - - # setup class init config - # refresh the actor by killing it and starting it (assuming they have the same name) - if refresh: - if cls.actor_exists(name): - cls.kill_actor(actor=name,verbose=verbose) - # assert not Module.actor_exists(name) - - options_kwargs['namespace'] = 'default' - - # create the actor if it doesnt exisst - # if the actor is refreshed, it should not exist lol (TODO: add a check) - - - - actor = cls.get_actor(name, virtual=virtual) - - - return actor 
- - @staticmethod - def get_actor_id( actor): - assert isinstance(actor, ray.actor.ActorHandle) - return actor.__dict__['_ray_actor_id'].hex() - - - @classmethod - def virtual_actor(cls, actor): - from commune.block.ray.client.ray_client import ClientModule - return ClientModule(actor=actor) - - @classmethod - def kill_actor(cls, actor, verbose=True): - import ray - - if cls.actor_exists(actor): - actor = ray.get_actor(actor) - else: - if verbose: - print(f'{actor} does not exist for it to be removed') - return None - ray.kill(actor) - - return True - ray_kill = kill_actor - - - @classmethod - def actor_exists(cls, actor): - ray = cls.ray_env() - if isinstance(actor, str): - try: - ray.get_actor(actor) - actor_exists = True - except ValueError as e: - actor_exists = False - - return actor_exists - else: - raise NotImplementedError - - @classmethod - def ray_actor(cls ,actor_name:str, virtual:bool=True): - ''' - Gets the ray actor - ''' - ray = cls.ray_env() - actor = ray.get_actor(actor_name) - # actor = Module.add_actor_metadata(actor) - if virtual: - actor = cls.virtual_actor(actor=actor) - return actor - - get_actor = ray_actor - - @classmethod - def ray_runtime_context(cls): - import ray - return ray.get_runtime_context() - - @classmethod - def ray_namespace(cls): - import ray - return ray.get_runtime_context().namespace - - @classmethod - def ray_context(cls): - import ray - import ray - return ray.runtime_context.get_runtime_context() - - @staticmethod - def ray_objects( *args, **kwargs): - import ray - return ray.experimental.state.api.list_objects(*args, **kwargs) - - @classmethod - def ray_actors(cls, state='ALIVE', names_only:bool = True,detail:bool=True, *args, **kwargs): - - ray = cls.ray_env() - from ray.experimental.state.api import list_actors - - kwargs['filters'] = kwargs.get('filters', [("state", "=", state)]) - kwargs['detail'] = detail - - actor_info_list = list_actors(*args, **kwargs) - ray_actors = [] - for i, actor_info in enumerate(actor_info_list): - # resource_map = {'memory': Module.get_memory_info(pid=actor_info['pid'])} - resource_list = actor_info_list[i].pop('resource_mapping', []) - resource_map = {} - for resource in resource_list: - resource_map[resource['name'].lower()] = resource['resource_ids'] - actor_info_list[i]['resources'] = resource_map - if names_only: - ray_actors.append(actor_info_list[i]['name']) - else: - ray_actors.append(actor_info_list[i]) - - return ray_actors - actors = ray_actors - - @classmethod - def actor_resources(cls, actor:str): - resource_map = cls.ray_actor_map()[actor]['required_resources'] - k_map = { - 'GPU': 'gpus', - 'CPU': 'cpus' - } - return {k_map[k]:float(v) for k,v in resource_map.items() } - @classmethod - def ray_actor_map(cls, ): - ray = cls.ray_env() - actor_list = cls.ray_actors(names_only=False, detail=True) - actor_map = {} - for actor in actor_list: - actor_name = actor.pop('name') - actor_map[actor_name] = actor - return actor_map - actor_map = ray_actor_map - - @classmethod - def ray_tasks(cls, running=False, name=None, *args, **kwargs): - ray = cls.ray_env() - filters = [] - if running == True: - filters.append([("scheduling_state", "=", "RUNNING")]) - if isinstance(name, str): - filters.append([("name", "=", name)]) - - if len(filters)>0: - kwargs['filters'] = filters - - ray_tasks = ray.experimental.state.api.list_tasks(*args, **kwargs) - return ray_tasks - - @staticmethod - def ray_nodes( *args, **kwargs): - from ray.experimental.state.api import list_nodes - return list_nodes(*args, **kwargs) - 
@classmethod - def ray_get(cls,*jobs): - cls.ray_env() - return ray.get(jobs) - @classmethod - def ray_wait(cls, *jobs): - cls.ray_env() - finished_jobs, running_jobs = ray.wait(jobs) - return finished_jobs, running_jobs - - - @classmethod - def ray_put(cls, *items): - ray = cls.ray_env() - import ray - return [ray.put(i) for i in items] + - @staticmethod - def get_ray_context(): - import ray - return ray.runtime_context.get_runtime_context() - @classmethod def fn(cls, module:str, fn:str , args:list = None, kwargs:dict= None): module = c.module(module) - is_self_method = bool(fn in module.self_methods()) + is_self_method = bool(fn in module.self_functions()) + if is_self_method: module = module() fn = getattr(module, fn) @@ -3705,28 +3222,17 @@ def fn(cls, module:str, fn:str , args:list = None, kwargs:dict= None): else: return fn() module_fn = fn - - module_cache = {} @classmethod - def module(cls,module: Any = 'module' , tree=None, **kwargs): + def module(cls,module: Any = 'module' , **kwargs): ''' Wraps a python class as a module ''' - if module is None: - module = cls.module_path() - modules = c.modules() - assert module in modules, f'{module} does not exist' - if module in c.module_cache: - module_class = c.module_cache[module] - else: - module_class = c.get_module(module,**kwargs) - c.module_cache[module] = module_class - + shortcuts = c.shortcuts() + if module in shortcuts: + module = shortcuts[module] + module_class = c.get_module(module,**kwargs) return module_class - - - m = mod = module # UNDER CONSTRUCTION (USE WITH CAUTION) @@ -3803,20 +3309,14 @@ def merge(cls, a = None, b= None, c.print(error_fn_list, 'DEBUG') return a - @classmethod - def nest_asyncio(cls): - import nest_asyncio - try: - nest_asyncio.apply() - except RuntimeError as e: - pass - # JUPYTER NOTEBOOKS @classmethod - def jupyter(cls): + def enable_jupyter(cls): cls.nest_asyncio() - enable_jupyter = jupyter + + jupyter = enable_jupyter + @classmethod def int_to_ip(cls, *args, **kwargs): @@ -3839,7 +3339,7 @@ def pip_list(cls, lib=None): @classmethod - def libs(cls): + def pip_libs(cls): return list(cls.lib2version().values()) @classmethod @@ -3852,26 +3352,17 @@ def ensure_lib(cls, lib:str, verbose:bool=False): required_libs = [] @classmethod - def ensure_libs(cls, *libs, verbose:bool=False): - assert len(libs) > 0, 'No libraries specified' - if len(libs) == 1: - if isinstance(libs[0], list): - libs = libs[0] - elif len(libs) == 0: - raise Exception('No libraries specified') - elif len(libs) > 1: - libs = list(libs) - else: - raise Exception('No libraries specified, WTF WAS THIS') - - - if libs == None: - libs = cls.required_libs - r = [] + def ensure_libs(cls, libs: List[str] = None, verbose:bool=False): + if hasattr(cls, 'libs'): + libs = cls.libs + results = [] for lib in libs: - r.append(cls.ensure_lib(lib, verbose=verbose)) - c.print(r[-1]) - return r + results.append(cls.ensure_lib(lib, verbose=verbose)) + return results + + @classmethod + def install(cls, libs: List[str] = None, verbose:bool=False): + return cls.ensure_libs(libs, verbose=verbose) @classmethod def ensure_env(cls): @@ -3900,16 +3391,9 @@ def pip_install(cls, cmd += ' --upgrade' return cls.cmd(cmd, verbose=verbose) - def install(self, lib:str = None, verbose:bool=True, upgrade=True): - return self.pip_install(lib, verbose=verbose) - - - - @classmethod def pip_exists(cls, lib:str, verbose:str=True): - return bool(lib in cls.libs()) - + return bool(lib in cls.pip_libs()) @classmethod def lib2version(cls, lib:str = None) -> dict: @@ -3924,7 
+3408,7 @@ def lib2version(cls, lib:str = None) -> dict: return lib2version @classmethod - def version(cls, lib:str=library_name): + def version(cls, lib:str=libname): lines = [l for l in cls.cmd(f'pip list', verbose=False).split('\n') if l.startswith(lib)] if len(lines)>0: return lines[0].split(' ')[-1].strip() @@ -3933,26 +3417,18 @@ def version(cls, lib:str=library_name): @classmethod def external_ip(cls, *args, **kwargs) -> str: - ip = c.module('network').get_external_ip(*args, **kwargs) - if ip == None or len(ip) == 0: - ip = c.default_ip - return ip - - + return c.module('network').external_ip(*args, **kwargs) @classmethod - def ip(cls, update:bool = False, **kwargs) -> str: - if not update: - ip = c.get('ip', None) - if ip != None: - return ip - - ip = cls.external_ip(**kwargs) + def ip(cls, max_age=10000, update:bool = False, **kwargs) -> str: + ip = c.get('ip', None, max_age=max_age, update=update) if ip == None: - ip = '0.0.0.0' - if update: + ip = cls.external_ip(**kwargs) c.put('ip', ip) return ip + + + @classmethod def queue(cls, size:str=-1, *args, mode='queue', **kwargs): if mode == 'queue': @@ -3968,7 +3444,6 @@ def queue(cls, size:str=-1, *args, mode='queue', **kwargs): else: raise NotImplementedError(f'mode {mode} not implemented') - @classmethod def resolve_ip(cls, ip=None, external:bool=True) -> str: if ip == None: @@ -3993,7 +3468,6 @@ def set_env(cls, key:str, value:str)-> None: ''' Pay attention to this function. It sets the environment variable ''' - import os os.environ[key] = value return value @@ -4002,7 +3476,6 @@ def get_env(cls, key:str)-> None: ''' Pay attention to this function. It sets the environment variable ''' - import os return os.environ[key] env = get_env @@ -4025,45 +3498,51 @@ def cuda_available(cls) -> bool: import torch return torch.cuda.is_available() @classmethod - def gpu_info_map(cls) -> Dict[int, Dict[str, float]]: + def gpu_info_map(cls, device:int = None, fmt='gb') -> Dict[int, Dict[str, float]]: import torch gpu_info = {} for gpu_id in cls.gpus(): + gpu_id = int(gpu_id) mem_info = torch.cuda.mem_get_info(gpu_id) - gpu_info[int(gpu_id)] = { + gpu_info[gpu_id] = { 'name': torch.cuda.get_device_name(gpu_id), 'free': mem_info[0], 'used': (mem_info[1]- mem_info[0]), - 'total': mem_info[1] + 'total': mem_info[1], + 'ratio': mem_info[0]/mem_info[1], } + if fmt != None: + keys = ['free', 'used', 'total'] + for k in keys: + gpu_info[gpu_id][k] = c.format_data_size(gpu_info[gpu_id][k], fmt=fmt) + if device != None: + return gpu_info[device] + return gpu_info @classmethod def gpu_total_map(cls) -> Dict[int, Dict[str, float]]: import torch - return {k:v['total'] for k,v in c.gpu_info_map().items()} - + return {k:v['total'] for k,v in c.gpu_info().items()} - @classmethod - def gpu_total(cls, idx=0, fmt='b') -> Dict[int, Dict[str, float]]: - import torch - return c.format_data_size(c.gpu_total_map()[idx]) - - gpu_map =gpu_info_map @classmethod def total_gpu_memory(cls) -> int: total_gpu_memory = 0 - for gpu_id, gpu_info in cls.gpu_map().items(): + for gpu_id, gpu_info in cls.gpu_info_map().items(): total_gpu_memory += gpu_info['total'] return total_gpu_memory @classmethod def used_gpu_memory(cls) -> int: used_gpu_memory = 0 - for gpu_id, gpu_info in cls.gpu_map().items(): + for gpu_id, gpu_info in cls.gpu_info_map().items(): used_gpu_memory += gpu_info['used'] return used_gpu_memory + + + def forward(self, a=1, b=2): + return a+b @staticmethod def format_data_size(x: Union[int, float], fmt:str='b', prettify:bool=False): @@ -4162,8 +3641,11 @@ def 
gpu_info(cls, device:int = None) -> Dict[str, Union[int, float]]: ''' if device is None: device = 0 - gpu_map = cls.gpu_map() - return gpu_map[device] + gpu_map = cls.gpu_info_map() + if device in gpu_map: + return gpu_map[device] + else: + return gpu_map # CPU LAND @@ -4239,28 +3721,7 @@ def get_tensor_size(cls, tensor:'torch.Tensor' = None, fmt:str='b') -> float: tensor = torch.rand(1) tensor_size = tensor.nelement() * tensor.element_size() return c.format_data_size(tensor_size, fmt=fmt) - @classmethod - def get_model_device(cls, model, fast_and_lazy:bool = True) -> 'torch.device': - if fast_and_lazy: - return next(model.parameters()).device - else: - unique_devices = set() - for p in model.parameters(): - unique_devices.add(p.device) - return list(unique_devices)[0] - return next(model.parameters()).device - - - @classmethod - def update_loop(cls, period=2, ): - while True: - c.print('Updating...', color='yellow') - modules = c.servers() - c.print(f'Modules (n): {modules}', color='cyan') - c.print(modules, color='purple') - c.update() - c.sleep(period) - + @classmethod def model_shortcuts(cls, **kwargs): return c.module('hf').getc('shortcuts') @@ -4278,136 +3739,41 @@ def rm_model_shortcut(cls, *args, **kwargs): def add_remote(self, *args, **kwargs): return c.module('namespace').add_remote(*args, **kwargs) + - @classmethod - def model_options(cls): - return list(c.model_shortcuts().keys()) - - @classmethod - def shortcut2model(cls, shortcut:str): - return c.model_shortcuts()[shortcut] - @staticmethod - def get_trainable_params(model:'nn.Module')->List[str]: - return c.module('model').get_trainable_params(model) - @classmethod - def model_gpu_memory(cls, model:str, num_shard = 2): - model_size = cls.get_model_size(model) - size_per_shard = model_size/num_shard - free_gpu_memory = cls.free_gpu_memory() - model_gpu_memory = {} - for i in range(num_shard): - for gpu_id in c.copy(list(free_gpu_memory.keys())): - gpu_memory = free_gpu_memory[gpu_id] - if gpu_memory > size_per_shard: - model_gpu_memory[gpu_id] = size_per_shard - free_gpu_memory.pop(gpu_id) - break - return model_gpu_memory + @classmethod - def model_gpus(cls, model, num_shard=2): - return list(cls.model_gpu_memory(model,num_shard).keys()) - - - - + def model_options(cls): + return list(c.model_shortcuts().keys()) - @classmethod - def get_empty_model(cls, model, - verbose: bool = False, - trust_remote_code:bool=True, - init_device:str = 'meta', - **kwargs): - model = c.model_shortcuts().get(model, model) - from transformers import AutoModelForCausalLM, AutoModel, AutoConfig - from accelerate import init_empty_weights - - kwargs['trust_remote_code'] = trust_remote_code - model = c.model_shortcuts().get(model, model) + def shortcut2model(cls, shortcut:str): + return c.model_shortcuts()[shortcut] - if isinstance(model, str): - if verbose: - c.print(f'loading config model from {model}...') + @staticmethod + def get_trainable_params(model:'nn.Module')->List[str]: + return c.module('model').get_trainable_params(model) - config = AutoConfig.from_pretrained(model, **kwargs) - config.init_device=init_device - config_dict = config.to_dict() - with init_empty_weights(): - model = AutoModelForCausalLM.from_config(config, **kwargs) - - - return model - - @classmethod - def init_empty_weights(cls, *args, **kwargs): - from accelerate import init_empty_weights - return init_empty_weights(*args, **kwargs) - - @classmethod - def get_model_size(cls, - model: 'nn.Module', - model_inflation_ratio: float = 1.0, - fmt = 'b', - keys:List[str]=None): - 
- # get the size of the model by initializing an empty model - model = c.resolve_model(model) - - params = {} - size_in_bytes = 0 - for name, param in model.state_dict().items(): - if keys != None and name not in keys: - continue - - size_in_bytes += cls.get_tensor_size(param) - - return c.format_data_size(size_in_bytes * model_inflation_ratio, fmt=fmt) + def model_gpus(cls, model, num_shard=2): + return list(cls.model_gpu_memory(model,num_shard).keys()) - model_size = get_model_size @classmethod def resolve_model(cls, model): if isinstance(model, str): model = c.get_empty_model(model) return model - - @classmethod - def params_size_map(cls, - model: str, - block_prefix:str = 'layers', - fmt= 'b', - keys:List[str]=None): - - - - # get the size of the model by initializing an empty model - model = c.resolve_model(model) - - params = {} - size_in_bytes = 0 - - for name, param in model.state_dict().items(): - params_size = c.format_data_size(cls.get_tensor_size(param), fmt=fmt) - if name.startswith(block_prefix): - - idx = name.replace(block_prefix+'.','').split('.')[0] - block_name = f'{block_prefix}.{idx}' - if block_name not in params: - params[block_name] = 0 - params[block_name] += params_size - else: - params[name] = params_size - - return params def num_params(self)->int: return self.get_num_params(self) + ### DICT LAND ### + def to_dict(self)-> Dict: return self.__dict__ @@ -4422,6 +3788,13 @@ def to_json(self) -> str: assert self.jsonable(state_dict), 'State dict must be jsonable' return json.dumps(state_dict) + @classmethod + def from_json(cls, json_str:str) -> 'Module': + import json + return cls.from_dict(json.loads(json_str)) + + + ### LOGGER LAND ### @classmethod def resolve_logger(cls, logger = None): if not hasattr(cls,'logger'): @@ -4432,13 +3805,19 @@ def resolve_logger(cls, logger = None): return cls.logger @classmethod - def resolve_console(cls, console = None): - if not hasattr(cls,'console'): - from rich.console import Console - cls.console = Console() - if console is not None: - cls.console = console - return cls.console + def resolve_console(cls, console = None, **kwargs): + if hasattr(cls,'console'): + return cls.console + + + import logging + from rich.logging import RichHandler + from rich.console import Console + logging.basicConfig(level=logging.DEBUG, handlers=[RichHandler()]) + # print the line number + console = Console() + cls.console = console + return console @classmethod def critical(cls, *args, **kwargs): @@ -4461,22 +3840,25 @@ def logmap(cls, *args, **kwargs): @classmethod def print(cls, *text:str, color:str=None, - return_text:bool=False, verbose:bool = True, console: Console = None, + flush:bool = False, **kwargs): - if verbose: - if color == 'random': - color = cls.random_color() - if color: - kwargs['style'] = color - console = cls.resolve_console(console) - try: - return console.print(*text, **kwargs) - except Exception as e: - print(e) - # print(*text, **kwargs) - + + if not verbose: + return + if color == 'random': + color = cls.random_color() + if color: + kwargs['style'] = color + + console = cls.resolve_console(console) + try: + if flush: + console.print(**kwargs, end='\r') + console.print(*text, **kwargs) + except Exception as e: + print(e) @classmethod def success(cls, *args, **kwargs): logger = cls.resolve_logger() @@ -4497,11 +3879,6 @@ def warning(cls, *args, **kwargs): logger = cls.resolve_logger() return logger.warning(*args, **kwargs) - @classmethod - def from_json(cls, json_str:str) -> 'Module': - import json - return 
cls.from_dict(json.loads(json_str)) - @classmethod @@ -4510,35 +3887,35 @@ def status(cls, *args, **kwargs): return cls.console.status(*args, **kwargs) @classmethod def log(cls, *args, **kwargs): - console = cls.resolve_console() return cls.console.log(*args, **kwargs) - - @classmethod - def test(cls, modules=['server', 'key', 'executor', 'namespace'], verbose:bool=False): - test_results = [] - for module_name in modules: - c.print('#'*300) - c.print(f'[bold cyan]Testing {module_name}[/bold cyan]', color='yellow') - - module = c.module(module_name) - assert hasattr(module, 'test'), f'Module {module_name} does not have a test function' - module_test_results = module.test() - test_results.append(module_test_results) - c.print(f'Test Results: {module_test_results}', color='white') - return test_results - - + @classmethod - def import_bittensor(cls): - try: - import bittensor - except RuntimeError: - cls.new_event_loop() - import bittensor - return bittensor - - - # TIME LAND + def test_fns(cls, *args, **kwargs): + return [f for f in cls.functions(*args, **kwargs) if f.startswith('test_')] + + @classmethod + def test(cls, module=None, timeout=60, trials=3): + module = module or cls.module_path() + if c.module_exists(module + '.test'): + c.print('FOUND TEST MODULE', color='yellow') + module = module + '.test' + cls = c.module(module) + self = cls() + future2fn = {} + for fn in self.test_fns(): + c.print(f'testing {fn}') + f = c.submit(getattr(self, fn), timeout=timeout) + future2fn[f] = fn + fn2result = {} + for f in c.as_completed(future2fn, timeout=timeout): + fn = future2fn[f] + result = f.result() + c.print(f'{fn} result: {result}') + assert result['success'], f'{fn} failed, {result}' + fn2result[fn] = result + return fn2result + + ### TIME LAND ### @classmethod def time( cls, t=None) -> float: @@ -4552,18 +3929,17 @@ def time( cls, t=None) -> float: def datetime(cls): import datetime # UTC - return datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") + return datetime.datetime.utcnow().strftime("%Y-%m-%d_%H:%M:%S") @classmethod def time2datetime(cls, t:float): import datetime - return datetime.datetime.fromtimestamp(t).strftime("%Y-%m-%d %H:%M:%S") + return datetime.datetime.fromtimestamp(t).strftime("%Y-%m-%d_%H:%M:%S") time2date = time2datetime @classmethod def datetime2time(cls, x:str): import datetime - c.print(x) return datetime.datetime.strptime(x, "%Y-%m-%d %H:%M:%S").timestamp() date2time = datetime2time @@ -4578,11 +3954,7 @@ def sleep(cls, seconds:float) -> None: import time time.sleep(seconds) return None - - # DICT LAND - - @classmethod def dict_put(cls, *args, **kwargs): dict_put = c.import_object('commune.utils.dict.dict_put') @@ -4610,29 +3982,6 @@ def argv(cls, include_script:bool = False): else: return args[1:] - @classmethod - def parse_args(cls, argv = None): - if argv is None: - argv = cls.argv() - - args = [] - kwargs = {} - parsing_kwargs = False - for arg in argv: - # TODO fix exception with "=" - # if any([arg.startswith(_) for _ in ['"', "'"]]): - # assert parsing_kwargs is False, 'Cannot mix positional and keyword arguments' - # args.append(cls.determine_type(arg)) - if '=' in arg: - parsing_kwargs = True - key, value = arg.split('=', 1) - # use determine_type to convert the value to its actual type - - kwargs[key] = cls.determine_type(value) - else: - assert parsing_kwargs is False, 'Cannot mix positional and keyword arguments' - args.append(cls.determine_type(arg)) - return args, kwargs # BYTES LAND @@ -4646,9 +3995,12 @@ def str2bytes(cls, data: str, mode: 
str = 'hex') -> bytes: @classmethod def bytes2str(cls, data: bytes, mode: str = 'utf-8') -> str: + if hasattr(data, 'hex'): return data.hex() else: + if isinstance(data, str): + return data return bytes.decode(data, mode) # JSON2BYTES @@ -4673,7 +4025,6 @@ def python2str(cls, input): input_type = type(input) if input_type == str: return input - if input_type in [dict]: input = json.dumps(input) elif input_type in [bytes]: @@ -4695,6 +4046,35 @@ def str2python(cls, input)-> dict: return output_dict + @classmethod + def is_file_module(cls, module = None) -> bool: + if module != None: + cls = c.module(module) + dirpath = cls.dirpath() + filepath = cls.filepath() + return bool(dirpath.split('/')[-1] != filepath.split('/')[-1].split('.')[0]) + + @classmethod + def module2isfolder(cls, module = None) -> bool: + modules = c.modules() + module2isfolder = {} + for m in modules: + try: + module2isfolder[m] = c.is_folder_module(m) + except Exception as e: + c.print(e) + module2isfolder[m] = False + + return module2isfolder + + + + @classmethod + def is_folder_module(cls, module = None) -> bool: + if module != None: + cls = c.module(module) + return not cls.is_file_module() + @staticmethod def jsonable( value): import json @@ -4704,15 +4084,9 @@ def jsonable( value): except: return False - @classmethod def restart_server(cls, module:str, **kwargs) -> None: - if c.server_exists(module): - c.print(f'Server {module} does not exist', color='red') - c.kill_server(module) - address = c.get_address(module, network='local') - port = address.split(':')[-1] if address != None else None - return c.serve(module, port=port, **kwargs) + return c.serve(module, **kwargs) server_restart = restart_server @@ -4726,10 +4100,14 @@ def get_keys(cls,*args, **kwargs ): @classmethod def rm_keys(cls,*args, **kwargs ): return c.module('key').rm_keys(*args, **kwargs ) - + @classmethod def key2address(cls,*args, **kwargs ): return c.module('key').key2address(*args, **kwargs ) + + @classmethod + def key_addresses(cls,*args, **kwargs ): + return c.module('key').addresses(*args, **kwargs ) k2a = key2address @classmethod @@ -4740,18 +4118,64 @@ def is_key(self, key:str) -> bool: def root_key(cls): return c.get_key() + @classmethod + def root_key_address(cls) -> str: + return c.root_key().ss58_address + + @classmethod + def root_keys(cls, search='module', address:bool = False): + keys = c.keys(search) + if address: + key2address = c.key2address(search) + keys = [key2address.get(k) for k in keys] + return keys + + @classmethod + def root_addys(cls): + return c.root_keys(address=True) + + + def transfer2roots(self, amount:int=1,key:str=None, n:int=10): + destinations = c.root_addys()[:n] + c.print(f'Spreading {amount} to {len(destinations)} keys', color='yellow') + return c.transfer_many(destinations=destinations, amounts=amount, n=n, key=key) + + + def add_root_keys(self, n=1, tag=None, **kwargs): + keys = [] + for i in range(n): + key_path = 'module' + '::'+ (tag if tag != None else '') + str(i) + c.add_key(key_path, **kwargs) + keys.append(key_path) + return {'success': True, 'keys': keys, 'msg': 'Added keys'} + + @classmethod + def asubmit(cls, fn:str, *args, **kwargs): + + async def _asubmit(): + kwargs.update(kwargs.pop('kwargs',{})) + return fn(*args, **kwargs) + return _asubmit() + + @classmethod def address2key(cls,*args, **kwargs ): return c.module('key').address2key(*args, **kwargs ) + + @classmethod + def key_addresses(cls,*args, **kwargs ): + return list(c.module('key').address2key(*args, **kwargs ).keys()) + + @classmethod 
def get_key_for_address(cls, address:str): return c.module('key').get_key_for_address(address) - # @classmethod - # def key_info(cls, key:str = None, **kwargs): - # return c.module('key').key_info(key, **kwargs) - + @classmethod + def get_key_address(cls, key): + return c.get_key(key).ss58_address + @classmethod def get_key(cls,key:str = None ,mode='commune', **kwargs) -> None: @@ -4772,6 +4196,23 @@ def get_key(cls,key:str = None ,mode='commune', **kwargs) -> None: key = module(key, **kwargs) return key + + @classmethod + def id(self): + return self.key.ss58_address + + @property + def ss58_address(self): + if not hasattr(self, '_ss58_address'): + self._ss58_address = self.key.ss58_address + return self._ss58_address + + @ss58_address.setter + def ss58_address(self, value): + self._ss58_address = value + return self._ss58_address + + def idcard(self) -> str: @@ -4785,50 +4226,70 @@ def verify_idcard(self, idcard:str = None) -> bool: idcard = c.str2bytes(idcard) return self.key.verify(idcard) + @classmethod + def hash_map(cls): + return { + 'code': cls.chash(), + 'commit': cls.commit_hash(), + } @classmethod def hash(cls, data: Union[str, bytes], - mode: str = 'sha256', **kwargs) -> bytes: - if not hasattr(cls, 'hash_module'): - cls.hash_module = cls.get_module('crypto.hash')() - return cls.hash_module(data, mode=mode, **kwargs) + if not hasattr(cls, '_hash_module'): + cls._hash_module = c.module('crypto.hash')() + return cls._hash_module(data, **kwargs) - default_password = 'bitconnect' + @classmethod - def resolve_password(cls, password: str) -> str: - if password == None: - password = cls.default_password - - - password = cls.python2str(password) - assert isinstance(password, str), f'Password must be a string , not {type(password)}' - return password + def readme_paths(cls): + readme_paths = [f for f in c.ls(cls.dirpath()) if f.endswith('md')] + return readme_paths + + @classmethod + def has_readme(cls): + return len(cls.readme_paths()) > 0 + + @classmethod + def readme(cls) -> str: + readme_paths = cls.readme_paths() + if len(readme_paths) == 0: + return '' + return c.get_text(readme_paths[0]) @classmethod def encrypt(cls, data: Union[str, bytes], key: str = None, - prefix = encrypted_prefix) -> bytes: - + password: str = None, + **kwargs + ) -> bytes: + """ + encrypt data with key + """ key = c.get_key(key) - data = c.python2str(data) - encrypted_data = key.encrypt(data) - return encrypted_data + return key.encrypt(data, password=password,**kwargs) + + + def test_encrypt(self): + data = 'hello world' + password = 'bitconnect' + encrypted = self.encrypt(data, password=password) + decrypted = self.decrypt(encrypted, password=password) + assert data == decrypted, f'Encryption failed. 
{data} != {decrypted}' + return {'success': True, 'msg': 'Encryption successful'} @classmethod def decrypt(cls, data: Union[str, bytes], key: str = None, - prefix = encrypted_prefix) -> bytes: - + password : str = None, + **kwargs) -> bytes: key = c.get_key(key) - data = c.python2str(data) - encrypted_data = key.decrypt(data) - return encrypted_data + return key.decrypt(data, password=password, **kwargs) @classmethod def put_cache(cls,k,v ): @@ -4842,64 +4303,17 @@ def get_cache(cls,k, default=None, **kwargs): def auth(self,*args, key=None, **kwargs): key = self.resolve_key(key) return self.module('subspace')().auth(*args, key=key, **kwargs) - - @classmethod - def call(cls, *args , n: int=1, return_future:bool=False, **kwargs) -> None: - if n == 1: - futures = c.async_call(*args,**kwargs) - else: - futures = [ c.async_call(fn, *args,**kwargs) for i in range(n)] - if return_future: - return futures - - return c.gather(futures) - @classmethod - async def async_call(cls, - module : str, - fn : str = 'info', - *args, - timeout : int = 10, - prefix_match:bool = False, - network:str = None, - key:str = None, - ignore_error = False, - kwargs = None, - **extra_kwargs - ) -> None: - - kwargs = kwargs or {} - kwargs.update(extra_kwargs) - try: - module = c.connect(module, prefix_match=prefix_match, network=network, virtual=False, key=key) - future = module.async_forward(fn=fn, kwargs=kwargs, args=args) - result = await asyncio.wait_for(future, timeout=timeout) - except Exception as e: - if ignore_error: - result = c.detailed_error(e) - else: - raise e - - return result - @classmethod - def live_modules(cls, **kwargs): - return cls.call_pool(fn='address', **kwargs) - - @classmethod - def call_pool(cls, *args, **kwargs): - loop = cls.get_event_loop() - return loop.run_until_complete(cls.async_call_pool(*args, **kwargs)) - - cpool = call_pool @classmethod - async def async_call_pool(cls, - modules, - fn = 'info', - *args, - network = 'local', - n=None, - **kwargs): + def call_pool(cls, + modules, + fn = 'info', + *args, + network = 'local', + timeout = 10, + n=None, + **kwargs): args = args or [] kwargs = kwargs or {} @@ -4910,35 +4324,20 @@ async def async_call_pool(cls, n = len(modules) modules = cls.shuffle(modules)[:n] assert isinstance(modules, list), 'modules must be a list' - c.print(f'Calling {fn} on {len(modules)} modules', color='green') - jobs = [] + futures = [] for m in modules: - job = c.call(m, fn, *args, return_future=True, network=network, **kwargs) - jobs.append(job) - - responses = await asyncio.gather(*jobs) - - is_error = lambda r: isinstance(r, dict) and 'error' in r - successes = [r for r in responses if not is_error(r)] - errors = [r for r in responses if is_error(r)] - - if len(successes) == 0: - c.print(f'ERRORS {errors}', color='red') - return dict(zip(modules, successes)) - - - @classmethod - def resolve_fn_module(cls, fn, module=None ) -> str: + job_kwargs = {'module': m, 'fn': fn, 'network': network, **kwargs} + future = c.submit(c.call, kwargs=job_kwargs, args=[*args] , timeout=timeout) + futures.append(future) + responses = c.wait(futures, timeout=timeout) + return responses - if module == None and len(fn.split('.')) > 1: - module = '.'.join(fn.split('.')[:-1]) - module = cls.connect(module) - - return fn, module def resolve_key(self, key: str = None) -> str: if key == None: + if hasattr(self, 'key'): + key = self.key key = self.resolve_keypath(key) key = self.get_key(key) return key @@ -4949,19 +4348,21 @@ def type_str(cls, x): return type(x).__name__ @classmethod - def 
keys(cls, search = None, *args, **kwargs): + def keys(cls, search = None, ss58=False,*args, **kwargs): if search == None: search = cls.module_path() if search == 'module': search = None - return c.module('key').keys(search, *args, **kwargs) + keys = c.module('key').keys(search, *args, **kwargs) + if ss58: + keys = [c.get_key_address(k) for k in keys] + return keys @classmethod def get_mem(cls, *args, **kwargs): return c.module('key').get_mem(*args, **kwargs) - - + mem = get_mem @classmethod def set_key(self, key:str = None, **kwargs) -> None: @@ -4985,22 +4386,20 @@ def add_key(cls, key, *args, **kwargs): return c.module('key').add_key(key, *args, **kwargs) @classmethod - def loadmems(self, *args, **kwargs): - return c.module('key').loadmems(*args, **kwargs) - - def savemems(self, *args, **kwargs): - return c.module('key').savemems(*args, **kwargs) - + def new_key(cls, *args, **kwargs): + return c.module('key').new_key( *args, **kwargs) + @classmethod def save_keys(cls, *args, **kwargs): - c.print('saving keys') return c.module('key').save_keys(*args, **kwargs) + savemems = savekeys = save_keys @classmethod def load_keys(cls, *args, **kwargs): return c.module('key').load_keys(*args, **kwargs) - + loadmems = loadkeys = load_keys + @classmethod def load_key(cls, *args, **kwargs): return c.module('key').load_key(*args, **kwargs) @@ -5010,24 +4409,10 @@ def sign(self, data:dict = None, key: str = None, **kwargs) -> bool: key = self.resolve_key(key) signature = key.sign(data, **kwargs) return signature - - - def timestamp_to_iso(timestamp): - import datetime - # Convert timestamp to datetime object - dt = datetime.datetime.fromtimestamp(timestamp) - - # Format datetime object as ISO date string - iso_date = dt.date().isoformat() - - return iso_date - - - @classmethod - def verify(cls, auth, module='subspace', **kwargs ) -> bool: - return c.module(module)(**kwargs).verify(auth) - + def verify(cls, auth, key=None, **kwargs ) -> bool: + key = c.get_key(key) + return key.verify(auth, **kwargs) @classmethod def get_signer(cls, data:dict ) -> bool: @@ -5037,7 +4422,6 @@ def get_signer(cls, data:dict ) -> bool: def start(cls, *args, **kwargs): return cls(*args, **kwargs) - @classmethod def is_encrypted(cls, data, prefix=encrypted_prefix): if isinstance(data, str): @@ -5047,18 +4431,12 @@ def is_encrypted(cls, data, prefix=encrypted_prefix): return bool(data.get('encrypted', False) == True) else: return False - - - - @classmethod - def rm_user(cls, user: str = None): - self.users.pop(user, None) - - @classmethod def network(cls) -> str: return c.resolve_network() + + net = network @classmethod @@ -5075,127 +4453,90 @@ def remove_user(self, key: str) -> None: self.users = [] self.users.pop(key, None) - @classmethod - def reserve_port(cls,port:int = None, var_path='reserved_ports' , root=True): - if port == None: - port = cls.free_port() - reserved_ports = cls.get(var_path, {}, root=root) - reserved_ports[str(port)] = {'time': cls.time()} - cls.put(var_path, reserved_ports, root=root) - c.print(f'reserving {port}') - return {'success':f'reserved port {port}', 'reserved': cls.reserved_ports()} - - resport = reserve_port - - @classmethod - def reserved_ports(cls, var_path='reserved_ports'): - return list(map(int, cls.get(var_path, {}, root=True).keys())) - resports = reserved_ports - @classmethod - def unreserve_port(cls,port:int, - var_path='reserved_ports' , - verbose:bool = True, - root:bool = True): - reserved_ports = cls.get(var_path, {}, root=True) - - port_info = reserved_ports.pop(port,None) - if 
port_info == None: - port_info = reserved_ports.pop(str(port),None) - - output = {} - if port_info != None: - cls.put(var_path, reserved_ports, root=True) - output['msg'] = 'port removed' - else: - output['msg'] = f'port {port} doesnt exist, so your good' + def fleet(cls, + module = None, + n=2, + tag=None, + max_workers=10, + parallel=True, + timeout=20, + remote=False, + **kwargs): + + if module == None: + module = cls.module_path() - output['reserved'] = cls.reserved_ports() - return output - - - - unresport = unreserve_port - - @classmethod - def unreserve_ports(cls,*ports, - var_path='reserved_ports' , - verbose:bool = True, - root:bool = True): - output ={} - reserved_ports = cls.get(var_path, {}, root=root) - if len(ports) == 0: - # if zero then do all fam, tehe - ports = list(reserved_ports.keys()) - elif len(ports) == 1 and isinstance(ports[0],list): - ports = ports[0] - ports = list(map(str, ports)) - reserved_ports = {rp:v for rp,v in reserved_ports.items() if not any([p in ports for p in [str(rp), int(rp)]] )} - cls.put(var_path, reserved_ports, root=root) - return cls.reserved_ports() - - - unresports = unreserve_ports - @classmethod - def fleet(cls,n=2, tag=None, max_workers=1, **kwargs): - executor = c.module('executor')(max_workers=max_workers) - futures = [] if tag == None: tag = '' - if max_workers == 1: - results = [] - for i in range(n): - c.print(f'Launching {tag}') - server_kwargs={'tag':tag + str(i), **kwargs} - result = cls.serve(**server_kwargs) - results = results + [result] - else: - for i in range(n): - future = executor.submit(fn=cls.serve, kwargs=server_kwargs) - futures = futures + [future] - results = c.wait(futures) + futures = [] + for i in range(n): + f = c.submit(c.serve, + kwargs={'module': module, 'tag':tag + str(i), **kwargs}, + timeout=timeout) + futures += [f] + results = [] + for future in c.as_completed(futures, timeout=timeout): + result = future.result() + c.print(result) + results += [result] return results - - @classmethod def kill_fleet(cls, tag=None, network='local', **kwargs): path = cls.resolve_server_name(tag=tag) servers = c.servers(path, network=network) - executor = c.module('executor')() + executor = c.module('executor')(mode='process') for server in servers: futures += [executor.submit(fn=cls.kill_server, kwargs={'server_name':p, 'network':network})] return c.wait(futures) + executor_cache = {} @classmethod - def get_executor(cls, *args, **kwargs): - if not hasattr(cls, 'executor'): - cls.executor = c.module('executor')() - return c.module('executor')(*args, **kwargs) + def executor(cls, max_workers:int=None, mode:str="thread", cache:bool = True, **kwargs): + if cache: + if mode in cls.executor_cache: + return cls.executor_cache[mode] + executor = c.module(f'executor').executor(max_workers=max_workers, mode=mode, **kwargs) + if cache: + cls.executor_cache[mode] = executor + return executor + @classmethod def submit(cls, fn, - args:list = [], - kwargs: dict = {}, + params = None, + kwargs: dict = None, + args:list = None, timeout:int = 20, - return_future:bool=False, + return_future:bool=True, init_args : list = [], init_kwargs:dict= {}, executor = None, - module = None, + module: str = None, + mode:str='thread', + max_workers : int = 100, ): - - + kwargs = {} if kwargs == None else kwargs + args = [] if args == None else args + if params != None: + if isinstance(params, dict): + kwargs = {**kwargs, **params} + elif isinstance(params, list): + args = [*args, *params] + else: + raise ValueError('params must be a list or a dictionary') 
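# The new submit() signature folds a single `params` argument into args/kwargs
# depending on its type, as the lines above show. A minimal standalone sketch
# of that normalization step, assuming nothing beyond the stdlib (the helper
# name below is illustrative, not part of the codebase):

def normalize_params(params=None, args=None, kwargs=None):
    """Merge `params` into positional args (if a list) or kwargs (if a dict)."""
    args = list(args or [])
    kwargs = dict(kwargs or {})
    if isinstance(params, dict):
        kwargs.update(params)           # dict -> keyword arguments
    elif isinstance(params, list):
        args.extend(params)             # list -> positional arguments
    elif params is not None:
        raise ValueError('params must be a list or a dictionary')
    return args, kwargs

# e.g. normalize_params({'fn': 'info'}, args=['demo']) -> (['demo'], {'fn': 'info'})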
+ fn = c.get_fn(fn) - executor = c.get_executor() if executor == None else executor + executor = c.executor(max_workers=max_workers, mode=mode) if executor == None else executor args = c.copy(args) kwargs = c.copy(kwargs) init_kwargs = c.copy(init_kwargs) @@ -5205,9 +4546,9 @@ def submit(cls, else: module = c.module(module) if isinstance(fn, str): - method_type = c.classify_method(getattr(module, fn)) + method_type = c.classify_fn(getattr(module, fn)) elif callable(fn): - method_type = c.classify_method(fn) + method_type = c.classify_fn(fn) else: raise ValueError('fn must be a string or a callable') @@ -5215,6 +4556,12 @@ def submit(cls, module = module(*init_args, **init_kwargs) future = executor.submit(fn=fn, args=args, kwargs=kwargs, timeout=timeout) + + if not hasattr(cls, 'futures'): + cls.futures = [] + + cls.futures.append(future) + if return_future: return future @@ -5225,54 +4572,20 @@ def submit(cls, def submit_batch(cls, fn:str, batch_kwargs: List[Dict[str, Any]], return_future:bool=False, timeout:int=10, module = None, *args, **kwargs): n = len(batch_kwargs) module = cls if module == None else module - executor = c.get_executor(max_workers=n) + executor = c.executor(max_workers=n) futures = [ executor.submit(fn=getattr(module, fn), kwargs=batch_kwargs[i], timeout=timeout) for i in range(n)] if return_future: return futures return c.wait(futures) - @classmethod - def regfleet(cls,module = None, tag:str=None, n:int=2, timeout=40 , stake=None, multithread:bool=False, **kwargs): - subspace = c.module('subspace')() - if tag == None: - tag = '' - server_names = [] - if stake == None: - stake = subspace.min_stake() - c.print('No stake provided, using min stake, which is {}'.format(stake), color='yellow') - if multithread: - executor = c.module('executor')(max_workers=n) - futures = [] - for i in range(n): - future = executor.submit(fn=cls.register, kwargs={'module':module, 'tag':tag+str(i), 'stake': stake, **kwargs}, timeout=timeout) - futures = futures + [future] - return c.wait(futures, timeout=timeout) - else: - for i in range(n): - r = cls.register(module=module, tag=tag+str(i), stake=stake, **kwargs) - assert r['success'] == True, r - server_names.append(r['server_name']) - return {'servers':server_names} - @classmethod - def servefleet(cls,module = None, tag:str=None, n:int=2, refresh=False, **kwargs): - subspace = c.module('subspace')() - if tag == None: - tag = '' - server_names = [] - for i in range(n): - r = cls.serve(module=module, tag=tag+str(i), refresh=refresh, **kwargs) - server_names.append(r) - return {'servers':server_names} - @classmethod def client(cls, *args, **kwargs) -> 'Client': - return c.module('module.client')(*args, **kwargs) + return c.module('client')(*args, **kwargs) @classmethod def serialize(cls, x, **kwargs): - serializer = c.serializer() - return serializer.serialize(x, **kwargs) + return c.serializer().serialize(x, **kwargs) @classmethod def serializer(cls, *args, **kwargs): @@ -5281,30 +4594,20 @@ def serializer(cls, *args, **kwargs): @classmethod def deserialize(cls, x, **kwargs): return c.serializer().deserialize(x, **kwargs) - - @classmethod - def proto2json(cls, data): - from google.protobuf.json_format import MessageToJson - return MessageToJson(data) - - @classmethod - def process(cls, *args, **kwargs): - return c.module('process').process(*args, **kwargs) - - @classmethod - def json2proto(cls, data): - from google.protobuf.json_format import JsonToMessage - return JsonToMessage(data) + @classmethod + def process(cls, *args, **kwargs): + 
return c.module('process').process(*args, **kwargs) @classmethod def copy(cls, data: Any) -> Any: import copy return copy.deepcopy(data) - + @classmethod - def launchpad(cls): - return c.import_object('commune.launchpad.Launchpad')() + def mv_key(cls, key:str, new_key:str): + return c.module('key').mv_key(key, new_key) + @classmethod def determine_type(cls, x): if x.lower() == 'null' or x == 'None': @@ -5363,12 +4666,8 @@ def set_port_range(cls, *port_range: list): return port_range - - @classmethod def get_port_range(cls, port_range: list = None) -> list: - - if port_range == None: port_range = c.get('port_range', default=cls.default_port_range) @@ -5392,6 +4691,48 @@ def resolve_port_range(cls, port_range: list = None) -> list: @classmethod def check_module(cls, module:str): return c.connect(module) + + @classmethod + def pwdtree(cls): + tree2path = c.tree2path() + pwd = c.pwd() + return {v:k for k,v in tree2path.items()}.get(pwd, None) + which_tree = pwdtree + + @classmethod + def istree(cls): + return cls.pwdtree() != None + + @classmethod + def is_pwd(cls, module:str = None): + if module != None: + module = c.module(module) + else: + module = cls + return module.dirpath() == c.pwd() + + @classmethod + def server_many(cls, *modules, n=2, **kwargs): + + if isinstance(modules[0], list): + modules = modules[0] + + futures = [] + for module in modules: + future = c.submit(c.serve, kwargs={'module': module, **kwargs}) + futures.append(future) + + results = [] + for future in c.as_completed(futures): + result = future.result() + c.print(result) + results.append(result) + return results + + + @classmethod + def currnet_module(cls): + return c.module(cls.module_path()) @classmethod def is_success(cls, x): @@ -5405,20 +4746,44 @@ def is_success(cls, x): return True @classmethod - def is_error(cls, x:dict): - return not cls.is_success(x) + def is_error(cls, x:Any): + if isinstance(x, dict): + if 'error' in x: + return True + if 'success' in x and x['success'] == True: + return True + return False + - @staticmethod - def is_number(value): - try: + @classmethod + def is_int(cls, value) -> bool: + o = False + try : int(value) - except ValueError: - return False - return True + if '.' not in str(value): + o = True + except: + pass + return o + + + @classmethod + def is_float(cls, value) -> bool: + o = False + try : + float(value) + if '.' 
in str(value): + o = True + except: + pass + + return o + + @classmethod - def resolve_network(cls, network=None): + def resolve_network(cls, network='local'): network_shortcuts = { 'r': 'remote', @@ -5430,10 +4795,7 @@ def resolve_network(cls, network=None): } network = network_shortcuts.get(network, network) - - if network == None: - network = cls.get_network() - + return network get_network = resolve_network @@ -5465,34 +4827,43 @@ def get_network(self): getnet = get_network resnet = resolve_network + + def update_config(self, k, v): + self.config[k] = v + return self.config + # local update @classmethod def update(cls, - network: str = None, + module = None, + tree:bool = True, + namespace: bool = False, + subspace: bool = False, + network: str = 'local', + **kwargs ): - + responses = [] + if tree: + r = c.tree() + responses.append(r) + if module != None: + return c.module(module).update() # update local namespace c.ip(update=True) - c.namespace(network=network, update=True) - servers = c.servers(network=network) + if namespace: + responses.append(c.namespace(network=network, update=True)) + if subspace: + responses.append(c.module('subspace').sync()) + c.ip(update=1) - return {'success': True, 'servers': servers} + return {'success': True, 'responses': responses} @classmethod def sync(cls, *args, **kwargs): + return c.module('subspace')().sync(*args, **kwargs) - @classmethod - def run_jobs(cls, jobs: List, mode ='asyncio',**kwargs): - if mode == 'asyncio': - loop = asyncio.get_event_loop() - results = loop.run_until_complete(asyncio.gather(*jobs)) - return results - else: - raise ValueError(f"Invalid mode: {mode}") - - @classmethod def filter(cls, text_list: List[str], filter_text: str) -> List[str]: return [text for text in text_list if filter_text in text] @@ -5516,40 +4887,72 @@ def get_file_contents(cls, class_name = None): return file_contents @classmethod - def put_text(cls, path:str, text:str, root=False, key=None) -> None: + def put_text(cls, path:str, text:str, key=None, bits_per_character=8) -> None: # Get the absolute path of the file - path = cls.resolve_path(path, root=root) + path = cls.resolve_path(path) + if not isinstance(text, str): + text = c.python2str(text) if key != None: text = c.get_key(key).encrypt(text) # Write the text to the file with open(path, 'w') as file: file.write(text) - # get size - text_size = len(text)*8 + text_size = len(text)*bits_per_character - return {'success': True, 'msg': f'Wrote text to {path}', 'size': text_size} - + + + def rm_lines(self, path:str, start_line:int, end_line:int) -> None: + # Get the absolute path of the file + text = c.get_text(path) + text = text.split('\n') + text = text[:start_line-1] + text[end_line:] + text = '\n'.join(text) + c.put_text(path, text) + return {'success': True, 'msg': f'Removed lines {start_line} to {end_line} from {path}'} + + def rm_line(self, path:str, line:int, text=None) -> None: + # Get the absolute path of the file + text = c.get_text(path) + text = text.split('\n') + text = text[:line-1] + text[line:] + text = '\n'.join(text) + c.put_text(path, text) + return {'success': True, 'msg': f'Removed line {line} from {path}'} + # Write the text to the file @classmethod - def add_text(cls, path:str, text:str, root=False) -> None: + def add_line(cls, path:str, text:str, line=None) -> None: # Get the absolute path of the file - path = cls.resolve_path(path, root=root) + path = cls.resolve_path(path) + text = str(text) # Write the text to the file + if line != None: + line=int(line) + lines = 
c.get_text(path).split('\n') + lines = lines[:line] + [text] + lines[line:] + c.print(lines) + + text = '\n'.join(lines) with open(path, 'w') as file: file.write(text) + + + return {'success': True, 'msg': f'Added line to {path}'} + + add_text = add_line + @classmethod def readlines(self, path:str, start_line:int = 0, end_line:int = 0, - root=False, resolve:bool = True) -> List[str]: # Get the absolute path of the file if resolve: - path = self.resolve_path(path, root=root) + path = self.resolve_path(path) # Read the contents of the file with open(path, 'r') as file: lines = file.readlines() @@ -5570,6 +4973,7 @@ def readlines(self, path:str, lines = lines[start_line:end_line] lines = '\n'.join(lines) return lines + @classmethod @@ -5592,7 +4996,6 @@ def get_text(cls, start_byte = file_size - start_byte if end_byte <= 0: end_byte = file_size - end_byte - if end_byte < start_byte: end_byte = start_byte + 100 chunk_size = end_byte - start_byte + 1 @@ -5634,37 +5037,8 @@ def get_text(cls, @classmethod - def free_gpu_memory(cls, - max_gpu_ratio: float = 1.0 , - reserved_gpus: bool = False, - buffer_memory: float = 0, - fmt = 'b') -> Dict[int, float]: - import torch - free_gpu_memory = {} - - buffer_memory = c.resolve_memory(buffer_memory) - - gpu_info_map = cls.gpu_map() - gpus = [int(gpu) for gpu in gpu_info_map.keys()] - - if reserved_gpus != False: - reserved_gpus = reserved_gpus if isinstance(reserved_gpus, dict) else cls.copy(cls.reserved_gpus()) - assert isinstance(reserved_gpus, dict), 'reserved_gpus must be a dict' - - for r_gpu, r_gpu_memory in reserved_gpus.items(): - gpu_info_map[r_gpu]['total'] -= r_gpu_memory - - for gpu_id, gpu_info in gpu_info_map.items(): - if int(gpu_id) in gpus or str(gpu_id) in gpus: - gpu_memory = max(gpu_info['total']*max_gpu_ratio - gpu_info['used'] - buffer_memory, 0) - if gpu_memory <= 0: - continue - free_gpu_memory[gpu_id] = c.format_data_size(gpu_memory, fmt=fmt) - - assert sum(free_gpu_memory.values()) > 0, 'No free memory on any GPU, please reduce the buffer ratio' - - - return cls.copy(free_gpu_memory) + def free_gpu_memory(cls, *args, **kwargs) -> Dict[int, float]: + return c.module('os').free_gpu_memory(*args, **kwargs) free_gpus = free_gpu_memory @@ -5687,88 +5061,135 @@ def repo2module( repo, module = None): c.new_module(module=module, repo=repo) return {'module':module, 'repo':repo, 'status':'success'} - - def new_modules(self, *modules, **kwargs): for module in modules: self.new_module(module=module, **kwargs) + + @classmethod + def find_code_lines(cls, search:str = None , module=None) -> List[str]: + module_code = c.module(module).code() + return c.find_lines(search=search, text=module_code) + + + + @classmethod + def find_lines(self, text:str, search:str) -> List[str]: + """ + Finds the lines in text with search + """ + found_lines = [] + lines = text.split('\n') + for line in lines: + if search in line: + found_lines += [line] + + return found_lines + + + + + # @classmethod + # def code2module(cls, code:str='print x'): + # new_module = + + @classmethod def new_module( cls, - module : str = None, + module : str , repo : str = None, - base : str = 'base', - overwrite : bool = False, - module_type : str ='dir'): + base_module : str = 'demo', + tree : bool = 'commune', + overwrite : bool = True, + **kwargs): + """ Makes directories for path. 
""" if module == None: assert repo != None, 'repo must be specified if module is not specified' module = os.path.basename(repo).replace('.git','').replace(' ','_').replace('-','_').lower() - module_path = 'path' - module = module.replace('.','/') - if c.has_module(module) and overwrite==False: - return {'success': False, 'msg': f' module {module} already exists, set overwrite=True to overwrite'} - module_path = os.path.join(c.modules_path, module) + tree_path = c.tree2path().get(tree) - if overwrite and c.module_exists(module_path): - c.rm(module_path) - - if repo != None: - # Clone the repository - c.cmd(f'git clone {repo} {module_path}') - # Remove the .git directory - c.cmd(f'rm -rf {module_path}/.git') + class_name = '' + for m in module.split('.'): + class_name += m[0].upper() + m[1:] # capitalize first letter + if c.module_exists(module): + if overwrite: + module_path = c.module(module).dirpath() if c.is_file_module(module) else c.module(module).filepath() + c.rm(module_path) + else: + return {'success': False, + 'path': module_path, + 'msg': f' module {module} already exists, set overwrite=True to overwrite'} + + # get the code ready from the base module + c.print(f'Getting {base_module}') + base_module = c.module(base_module) + is_folder_module = base_module.is_folder_module() + + base_module_class = base_module.class_name() + module_class_name = ''.join([m[0].upper() + m[1:] for m in module.split('.')]) + + # build the path2text dictionary + if is_folder_module: + dirpath = tree_path + '/'+ module.replace('.','/') + '/' + base_dirpath = base_module.dirpath() + path2text = c.path2text( base_module.dirpath()) + path2text = {k.replace(base_dirpath +'/',dirpath ):v for k,v in path2text.items()} + else: + module_path = tree_path + '/'+ module.replace('.','/') + '.py' + code = base_module.code() + path2text = {module_path: code} + og_path2text = c.copy(path2text) + for path, text in og_path2text.items(): + file_type = path.split('.')[-1] + is_module_python_file = (file_type == 'py' and 'class ' + base_module_class in text) - # Create the module name if it doesn't exist, infer it from the repo name - if module == None: - assert repo != None, 'repo must be specified if module is not specified' - module = os.path.basename(repo).replace('.git','').replace(' ','_').replace('-','_').lower() - - # currently we are using the directory name as the module name - if module_type == 'dir': - c.mkdir(module_path, exist_ok=True) - else: - raise ValueError(f'Invalid module_type: {module_type}, options are dir, file') - - - base_module = c.module(base) - base_code = base_module.code() - base_config = base_module.config() - module = module.replace('/','_') # replace / with _ for the class name - - # define the module code and config paths - module_config_path = f'{module_path}/{module}.yaml' - module_code_path =f'{module_path}/{module}.py' - module_code_lines = [] - class_name = module[0].upper() + module[1:] # capitalize first letter - class_name = ''.join([m.capitalize() for m in module.split('_')]) - - for code_ln in base_code.split('\n'): - if all([ k in code_ln for k in ['class','c.Module', ')', '(']]): - indent = code_ln.split('class')[0] - code_ln = f'{indent}class {class_name}(c.Module):' - module_code_lines.append(code_ln) - module_code = '\n'.join(module_code_lines) - c.put_text(module_code_path, module_code) - c.save_yaml(module_config_path, base_config) - - c.update() + if is_folder_module: + if file_type == 'yaml' or is_module_python_file: + path_filename = path.split('/')[-1] + new_filename 
= module.replace('.', '_') + '.'+ file_type + path = path[:-len(path_filename)] + new_filename - return {'success': True, 'msg': f' created a new repo called {module}'} + + if is_module_python_file: + text = text.replace(base_module_class, module_class_name) + + path2text[path] = text + c.put_text(path, text) + c.print(f'Created {path} :: {module}') + + assert c.module_exists(module), f'Failed to create module {module}' + + return {'success': True, 'msg': f'Created module {module}', 'path': path, 'paths': list(c.path2text(c.module(module).dirpath()).keys())} + + add_module = new_module + make_dir= mkdir @classmethod - def filepath2text(cls, path:str = None): - if path == None: - path = c.root_path - filepath2text = {} - for filepath in c.glob(path): - filepath2text[filepath] = c.get_text(filepath) - return filepath2text + def path2text(cls, path:str, relative=False): + + path = cls.resolve_path(path) + assert os.path.exists(path), f'path {path} does not exist' + if os.path.isdir(path): + filepath_list = c.glob(path + '/**') + else: + assert os.path.exists(path), f'path {path} does not exist' + filepath_list = [path] + path2text = {} + for filepath in filepath_list: + try: + path2text[filepath] = c.get_text(filepath) + except Exception as e: + pass + if relative: + pwd = c.pwd() + path2text = {os.path.relpath(k, pwd):v for k,v in path2text.items()} + return path2text @classmethod @@ -5794,9 +5215,6 @@ def max_gpu_memory(cls, memory:Union[str,int] = None, fmt:str = 'b', decimals:int = 3, **kwargs): - - - memory = cls.resolve_memory(memory) min_memory = min_memory_ratio * memory @@ -5869,28 +5287,34 @@ def resolve_module(cls, module=None): thread_map = {} @classmethod - def get_fn(cls, fn:str, seperator='.'): + def get_fn(cls, fn:str, + seperator='/',ignore_module_pattern:bool = False): + if isinstance(fn, str): - if seperator in fn: - # module:fn + if seperator in fn and (not ignore_module_pattern): + # module{sperator}fn fn_splits = fn.split(seperator) + # incase you have multiple seperators in the name module = seperator.join(fn_splits[:-1]) fn = fn_splits[-1] + # get the model module = c.module(module) else: module = cls - # get the mdoule function - if hasattr(module, fn): - fn = getattr(module, fn) - else: - return None - if not callable(fn): - return fn - + fn = getattr(module, fn) + elif callable(fn): + pass + elif isinstance(fn, property): + pass + else: + raise ValueError(f'fn must be a string or callable, got {type(fn)}') + # assert callable(fn), 'Is not callable' return fn - + + resolve_fn = get_fn + @classmethod def resolve_memory(cls, memory: Union[str, int, float]) -> str: @@ -5919,9 +5343,9 @@ def resolve_memory(cls, memory: Union[str, int, float]) -> str: @classmethod - def reserve_gpus(cls,gpu_memory: Union[Dict, str, int, float], refresh:bool = False, root=True, **kwargs): + def reserve_gpus(cls,gpu_memory: Union[Dict, str, int, float], refresh:bool = False, **kwargs): reserved_gpu_memory = {} if refresh else cls.reserved_gpus() - if type(gpu_memory) in [int, float, str]: + if type(gpu_mmory) in [int, float, str]: gpu_memory = cls.max_gpu_memory(gpu_memory, **kwargs) for gpu, memory in gpu_memory.items(): memory = cls.resolve_memory(memory) @@ -5930,12 +5354,12 @@ def reserve_gpus(cls,gpu_memory: Union[Dict, str, int, float], refresh:bool = Fa reserved_gpu_memory[gpu] += memory else: reserved_gpu_memory[gpu] = memory - cls.put('reserved_gpu_memory', reserved_gpu_memory, root=root) + c.put('reserved_gpu_memory', reserved_gpu_memory) return reserved_gpu_memory @classmethod 
def reserved_gpus(cls,*args, **kwargs) -> Dict[str, int]: - reserved_gpus = cls.get('reserved_gpu_memory', {}, root=True) + reserved_gpus = c.get('reserved_gpu_memory', {}) reserved_gpus = {k:int(v) for k,v in reserved_gpus.items() if v > 0} reserved_gpus = {int(k):int(v) for k,v in reserved_gpus.items()} return reserved_gpus @@ -5956,7 +5380,7 @@ def unreserve_gpus(cls,gpu_memory: Union[dict] = None,*args, **kwargs): c.print(f'unreserving {gpu_memory}') reserved_gpu_memory = {k:v for k,v in reserved_gpu_memory.items() if v > 0} - cls.put('reserved_gpu_memory', reserved_gpu_memory, root=True) + c.put('reserved_gpu_memory', reserved_gpu_memory) return cls.reserved_gpus() release_gpus = unleash_gpus = unreserve_gpus @@ -5971,25 +5395,6 @@ def link_cmd(cls, old, new): link_cmd[new] = old cls.put('link_cmd', link_cmd) - - # @classmethod - # def remote(cls, name:str = None, remote :str = False,**remote_kwargs): - # def decorator(fn): - # if name is None: - # name = fn.__name__ - # def inner_function(**kwargs): - # remote = kwargs.pop('remote', remote) - # if remote: - # kwargs['remote'] = False - # return cls.launch(fn=fn, kwargs=kwargs, name=name, **remote_kwargs) - # else: - # return fn(**kwargs) - - # # Return the inner function (wrapper) - # return inner_function - - # # Return the decorator function - # return decorator @classmethod @@ -5998,15 +5403,18 @@ def remote_fn(cls, module: str = None, args : list = None, kwargs : dict = None, - locals = None, name : str =None, tag: str = None, refresh : bool =True, - tag_seperator : str = '::',): - - if locals != None: - kwargs = c.locals2kwargs(locals) + mode = 'pm2', + tag_seperator : str = '::', + cwd = None, + **extra_launch_kwargs + ): + kwargs = c.locals2kwargs(kwargs) + if 'remote' in kwargs: + kwargs['remote'] = False if len(fn.split('.'))>1: module = '.'.join(fn.split('.')[:-1]) fn = fn.split('.')[-1] @@ -6014,33 +5422,34 @@ def remote_fn(cls, kwargs = kwargs if kwargs else {} args = args if args else [] - - + if name == None: module_path = cls.resolve_module(module).module_path() name = f"{module_path}{tag_seperator}{fn}" + if tag != None: name = f'{name}{tag_seperator}{tag}' if 'remote' in kwargs: kwargs['remote'] = False - - cls.launch(fn=fn, + + return cls.launch(fn=fn, module = module, kwargs=kwargs, refresh=refresh, - name=name) + name=name, + cwd = cwd or cls.dirpath(), + **extra_launch_kwargs) + + return {'success': True, 'msg': f'Launched {name}', 'timestamp': c.timestamp()} - rfn = remote_fn @classmethod def choice(cls, options:Union[list, dict])->list: - import random options = c.copy(options) # copy to avoid changing the original if len(options) == 0: return None if isinstance(options, dict): options = list(options.values()) - assert isinstance(options, list),'options must be a list' return random.choice(options) @@ -6070,21 +5479,16 @@ def colors(cls): colours = colors @classmethod def random_color(cls): - import random return random.choice(cls.colors()) randcolor = randcolour = colour = color = random_colour = random_color @classmethod def random_float(cls, min=0, max=1): - import random return random.uniform(min, max) @classmethod def random_ratio_selection(cls, x:list, ratio:float = 0.5)->list: - - - import random if type(x) in [float, int]: x = list(range(int(x))) assert len(x)>0 @@ -6095,23 +5499,6 @@ def random_ratio_selection(cls, x:list, ratio:float = 0.5)->list: k = max(int(len(x) * ratio),1) return x[:k] - default_tag = 'base' - @property - def tag(self): - tag = None - if not hasattr(self, 'config') or not 
isinstance(self.config, dict): - self.config = c.dict2munch({}) - if 'tag' in self.config: - tag = self.config['tag'] - return tag - @tag.setter - def tag(self, value): - if not hasattr(self, 'config') or not isinstance(self.config, dict): - self.config = c.dict2munch({}) - self.config['tag'] = value - return value - - @classmethod def tags(cls): return ['alice', 'bob', 'chris', 'dan', 'fam', 'greg', 'elon', 'huck'] @@ -6119,55 +5506,116 @@ def tags(cls): @classmethod def rand_tag(cls): return cls.choice(cls.tags()) - @staticmethod - def wait(futures:list, timeout:int = 20, verbose:bool = False) -> list: - + + @classmethod + def obj2typestr(cls, obj): + return str(type(obj)).split("'")[1] + + @classmethod + def is_coroutine(cls, future): + """ + returns True if future is a coroutine + """ + return cls.obj2typestr(future) == 'coroutine' + + @classmethod + def as_completed(cls , futures:list, timeout:int=10, **kwargs): + return concurrent.futures.as_completed(futures, timeout=timeout) + @classmethod + def wait(cls, futures:list, timeout:int = 30, generator:bool=False, return_dict:bool = True) -> list: import concurrent.futures - futures = [futures] if not isinstance(futures, list) else futures - future2idx = {future:i for i,future in enumerate(futures)} - results = [] - # wait for the futures as they complete + is_singleton = bool(not isinstance(futures, list)) - results = [] - results = [None]*len(futures) + futures = [futures] if is_singleton else futures + # if type(futures[0]) in [asyncio.Task, asyncio.Future]: + # return c.gather(futures, timeout=timeout) + + if len(futures) == 0: + return [] + if c.is_coroutine(futures[0]): + return c.gather(futures, timeout=timeout) + + future2idx = {future:i for i,future in enumerate(futures)} - try: - for future in concurrent.futures.as_completed(futures, timeout=timeout): - idx = future2idx[future] - results[idx] = future.result() - except Exception as e: - if verbose: - c.print(c.detailed_error(e)) + if timeout == None and hasattr(futures[0], 'timeout'): + timeout = futures[0].timeout - return results + + if generator: + def get_results(futures): + try: + for future in concurrent.futures.as_completed(futures, timeout=timeout): + if return_dict: + idx = future2idx[future] + yield {'idx': idx, 'result': future.result()} + else: + yield future.result() + except Exception as e: + c.print(f'Error: {e}') + yield None + + else: + def get_results(futures): + results = [None]*len(futures) + try: + for future in concurrent.futures.as_completed(futures, timeout=timeout): + idx = future2idx[future] + results[idx] = future.result() + del future2idx[future] + if is_singleton: + results = results[0] + except Exception as e: + unfinished_futures = [future for future in futures if future in future2idx] + c.print(f'Error: {e}, {len(unfinished_futures)} unfinished futures with timeout {timeout} seconds') + return results + + return get_results(futures) + + @staticmethod + def address2ip(address:str) -> str: + return str('.'.join(address.split(':')[:-1])) @staticmethod def as_completed( futures, timeout=10, **kwargs): import concurrent.futures return concurrent.futures.as_completed(futures, timeout=timeout, **kwargs) - @classmethod - def gather(cls,jobs:list, mode='asyncio', loop=None, timeout = 20)-> list: + def gather(cls,jobs:list, timeout:int = 20, loop=None)-> list: + + if loop == None: + loop = c.get_event_loop() + if not isinstance(jobs, list): singleton = True jobs = [jobs] else: singleton = False - assert isinstance(jobs, list) - if mode == 'asyncio': - if 
loop == None: - loop = c.get_event_loop() - results = loop.run_until_complete(asyncio.wait_for(asyncio.gather(*jobs), timeout=timeout)) - else: - raise NotImplementedError + + assert isinstance(jobs, list) and len(jobs) > 0, f'Invalid jobs: {jobs}' + # determine if we are using asyncio or multiprocessing + + # wait until they finish, and if they dont, give them none + + # return the futures that done timeout or not + async def wait_for(future, timeout): + try: + result = await asyncio.wait_for(future, timeout=timeout) + except asyncio.TimeoutError: + result = {'error': f'TimeoutError: {timeout} seconds'} + + return result + + jobs = [wait_for(job, timeout=timeout) for job in jobs] + future = asyncio.gather(*jobs) + results = loop.run_until_complete(future) if singleton: return results[0] return results - + @classmethod def split_gather(cls,jobs:list, n=3, **kwargs)-> list: if len(jobs) < n: @@ -6177,8 +5625,8 @@ def split_gather(cls,jobs:list, n=3, **kwargs)-> list: results = [] for gather_result in gather_results: results += gather_result - return results + @classmethod def addresses(cls, *args, **kwargs) -> List[str]: return list(c.namespace(*args,**kwargs).values()) @@ -6195,38 +5643,9 @@ def task(cls, fn, timeout=1, mode='asyncio'): assert callable(fn) future = asyncio.wait_for(fn, timeout=timeout) return future - else: - raise NotImplemented - - - @staticmethod - def is_ss58(address): - # Check address length - if len(address) != 47: - return False - - # Check prefix - network_prefixes = ['1', '2', '5', '7'] # Add more prefixes as needed - if address[0] not in network_prefixes: - return False - - # Verify checksum - encoded = '0123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' - address_without_checksum = address[:-1] - checksum = address[-1] - address_hash = 0 - for char in address_without_checksum: - address_hash = address_hash * 58 + encoded.index(char) - - # Calculate the expected checksum - expected_checksum = encoded[address_hash % 58] - - # Compare the expected checksum with the provided checksum - if expected_checksum != checksum: - return False + else: + raise NotImplemented - return True - @staticmethod def is_mnemonic(s: str) -> bool: import re @@ -6234,7 +5653,6 @@ def is_mnemonic(s: str) -> bool: pattern = r'^(\w+\s){11}\w+(\s\w+){11}$|^(\w+\s){23}\w+$' return bool(re.match(pattern, s)) - @staticmethod def is_private_key(s: str) -> bool: import re @@ -6242,22 +5660,12 @@ def is_private_key(s: str) -> bool: pattern = r'^[0-9a-fA-F]{64}$' return bool(re.match(pattern, s)) - @classmethod def mv(cls, path1, path2): - import shutil path1 = cls.resolve_path(path1) path2 = cls.resolve_path(path2) - assert os.path.exists(path1), path1 - if not os.path.isdir(path2): - path2_dirpath = os.path.dirname(path2) - if not os.path.isdir(path2_dirpath): - os.makedirs(path2_dirpath, exist_ok=True) - shutil.move(path1, path2) - return path2 + return c.module('os').mv(path1, path2) - - @classmethod def cp(cls, path1:str, path2:str, refresh:bool = False): import shutil @@ -6280,8 +5688,12 @@ def cp(cls, path1:str, path2:str, refresh:bool = False): shutil.copy(path1, path2) else: raise ValueError(f'path1 is not a file or a folder: {path1}') - return path2 + return {'success': True, 'msg': f'Copied {path1} to {path2}'} + + # def cp_module(self, module:str, new_module:str = None, refresh:bool = False): + # if refresh:c + # self.rm_module(new_module) @classmethod def get_sample_schema(cls, x:dict) -> dict: @@ -6325,7 +5737,6 @@ def check_miners(cls, *args, module='bittensor', **kwargs): 
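# The reworked gather() above wraps every coroutine in asyncio.wait_for so one
# slow job cannot stall the whole batch; a timed-out job resolves to an error
# dict instead of raising. A self-contained sketch of that pattern, with
# illustrative names only:

import asyncio

async def _with_timeout(coro, timeout: float):
    try:
        return await asyncio.wait_for(coro, timeout=timeout)
    except asyncio.TimeoutError:
        return {'error': f'TimeoutError: {timeout} seconds'}

async def gather_with_timeout(coros, timeout: float = 20):
    return await asyncio.gather(*[_with_timeout(job, timeout) for job in coros])

# asyncio.run(gather_with_timeout([asyncio.sleep(0.1), asyncio.sleep(99)], timeout=1))
# -> [None, {'error': 'TimeoutError: 1 seconds'}]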
@classmethod def shuffle(cls, x:list)->list: - import random if len(x) == 0: return x random.shuffle(x) @@ -6355,9 +5766,17 @@ def push(cls, msg='update', cwd=None): @classmethod def make_pull(cls): return cls.cmd('make pull') + + def is_fn_self(self, fn): + fn = self.resolve_fn(fn) + c.print(dir(fn)) + return hasattr(fn, '__self__') and fn.__self__ == self @staticmethod - def retry(fn, trials:int = 3, verbose:bool = True): + def retry(fn, trials:int = 3, verbose:bool = True): + # if fn is a self method, then it will be a bound method, and we need to get the function + if hasattr(fn, '__self__'): + fn = fn.__func__ def wrapper(*args, **kwargs): for i in range(trials): try: @@ -6369,135 +5788,8 @@ def wrapper(*args, **kwargs): c.print(f'Retrying {fn.__name__} {i+1}/{trials}', color='red') return wrapper - - - @staticmethod - def encode_topk( forward_response_tensor: 'torch.Tensor' , topk:int=4096) -> 'torch.Tensor': - import torch - """ Returns topk tokens/probabilities given unnormalized logits as input. """ - - #import ipdb; ipdb.set_trace() - - logits = forward_response_tensor # unnormalized logit scores: [batch_size, sequence_len, vocab_size] - probs = torch.softmax(logits, dim=-1).to(torch.float32) # normalized probabilities: [batch_size, sequence_len, vocab_size] - - topk_indices = torch.argsort(probs, dim=-1, descending=True)[...,:topk] - # topk_values, topk_indices = torch.topk(probs, topk) # topk probs and indices: [batch_size, sequence_len, topk] - - topk_values = probs.gather( index=topk_indices, dim=-1) - encoded_probs = torch.cat([topk_values, topk_indices], dim=-1) # [batch_size, sequence_len, topk + topk] - return encoded_probs # [batch_size, sequence_len, topk + topk] - - # @staticmethod - # def private_key_to_mnemonic(private_key): - # # Convert the public key to a hex string - # public_key_hex = substrate.keccak_256(private_key).hex() - - # # Convert the public key hex to a mnemonic - # mnemonic = bip39.mnemonic_from_entropy(public_key_hex) - - # return mnemonic - - @classmethod - def docker_ps(cls, sudo=True): - return cls.cmd('docker ps', sudo=True) - dps = docker_ps - - ''' - SSH LAND - ''' - - @classmethod - def add_ssh_key(cls,public_key:str, authorized_keys_file:str='~/authorized_keys'): - authorized_keys_file = os.path.expanduser(authorized_keys_file) - with open(authorized_keys_file, 'a') as auth_keys_file: - auth_keys_file.write(public_key) - auth_keys_file.write('\n') - - c.print('Added the key fam') - - @classmethod - def ssh_authorized_keys(cls, authorized_keys_file:str='~/.ssh/authorized_keys'): - authorized_keys_file = os.path.expanduser(authorized_keys_file) - return cls.get_text(authorized_keys_file) - - @staticmethod - def get_public_key_from_file(public_key_file='~/.ssh/id_rsa.pub'): - public_key_file = os.path.expanduser(public_key_file) - os.path.exists(public_key_file), f'public key file {public_key_file} does not exist' - with open(public_key_file, 'r') as key_file: - public_key_data = key_file.read().strip() - - # Extract the public key from the data - public_key = None - if public_key_data.startswith("ssh-rsa"): - public_key = public_key_data - - return public_key - - ssh_path = os.path.expanduser('~/.ssh/id_rsa.pub') - - @classmethod - def resolve_ssh_path(cls, ssh_path=None): - if ssh_path is None: - ssh_path = cls.ssh_path - return os.path.expanduser(ssh_path) - @classmethod - def ssh_pubkey(cls,ssh_path=None): - ssh_path = cls.resolve_ssh_path(ssh_path) - return cls.get_text(ssh_path) - @classmethod - def generate_ssh_key_pair(cls, path=None, - 
passphrase=None): - c.ensure_lib('paramiko') - path = cls.resolve_ssh_path(path) - import paramiko - key = paramiko.RSAKey.generate(bits=2048) - # Save the private key to a file - key.write_private_key_file(path, password=passphrase) - # Save the public key to a file - with open(path, "w") as pub_key_file: - pub_key_file.write(f"{key.get_name()} {key.get_base64()}") - - return cls.ssh_pubkey(path) - - @classmethod - def ssh_key(cls, key_file=os.path.expanduser('~/.ssh/id_rsa'), - passphrase=None): - c.ensure_lib('paramiko') - import paramiko - key = paramiko.RSAKey.generate(bits=2048) - - # Save the private key to a file - key.write_private_key_file(key_file, password=passphrase) - - # Save the public key to a file - ssh_key_path = f"{key_file}.pub" - with open(ssh_key_path, "w") as pub_key_file: - pub_key_file.write(f"{key.get_name()} {key.get_base64()} Generated by Python") - - c.print(f"SSH key pair generated and saved to {ssh_key_path}") - - @classmethod - def miner(cls, - api_key = None, - wallet = 'ensemble.vali', - miner = '~/commune/bittensor/neurons/text/prompting/miners/openai/neuron.py', - port=2012, - network = 'finney', - netuid = 1, - *args, **kwargs): - miner = os.path.expanduser(miner) - api_key = api_key or os.environ.get('OPENAI_API_KEY') - wallet_name, wallet_hotkey = wallet.split('.') - name = f'miner::{wallet}::{network}::{netuid}' - command = f"pm2 start {miner} --name {name} --interpreter python3 -- --wallet.name {wallet_name} --wallet.hotkey {wallet_hotkey} --axon.port {port} --openai.api_key {api_key} --neuron.no_set_weights --subtensor.network {network} --netuid {netuid} --logging.debug" - cls.cmd(command) - c.print({'msg': f"Started miner {name} on port {port}"}) - - @staticmethod def reverse_map(x:dict)->dict: ''' @@ -6555,20 +5847,21 @@ def ss58_decode(*args, **kwargs): @classmethod def fn2str(cls,search = None, code = True, defaults = True, **kwargs): - schema = cls.schema(search=search, code=code, defaults=defaults) + fns = cls.fns(search=search) fn2str = {} - for k,v in schema.items(): - fn2str[k] = c.python2str(v) + for fn in fns: + fn2str[fn] = cls.fn_code(fn) return fn2str @classmethod - def fn2hash(cls, *args, mode='sha256', **kwargs): + def fn2hash(cls, fn=None , mode='sha256', **kwargs): fn2hash = {} - for k,v in cls.fn2str(*args, **kwargs).items(): + for k,v in cls.fn2str(**kwargs).items(): fn2hash[k] = c.hash(v,mode=mode) + if fn: + return fn2hash[fn] return fn2hash - @classmethod def module2fn2str(self, code = True, defaults = False, **kwargs): module2fn2str = { } @@ -6582,11 +5875,7 @@ def module2fn2str(self, code = True, defaults = False, **kwargs): return module2fn2str - @classmethod - def stwrite(self, *args, **kwargs): - import streamlit as st - st.write(*args, **kwargs) - + # TAG CITY def set_tag(self, tag:str,default_tag:str='base'): @@ -6602,10 +5891,7 @@ def resolve_tag(self, tag:str=None, default_tag='base'): tag = default_tag assert tag != None return tag - def resolve_tag_path(self, tag=None): - tag = self.resolve_tag(tag) - return self.resolve_path(tag) - + @classmethod def python2types(cls, d:dict)-> dict: return {k:str(type(v)).split("'")[1] for k,v in d.items()} @@ -6616,7 +5902,6 @@ def echo(x): @staticmethod def get_files_code(directory): - import os code_dict = {} for root, dirs, files in os.walk(directory): @@ -6635,39 +5920,57 @@ def pool(cls , n=5, **kwargs): for i in range(n): cls.serve(tag=str(i), **kwargs) + @classmethod + def self_functions(cls): + return c.classify_fns(cls)['self'] + + self_functions = self_functions 
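# classify_fn (defined just below) infers a function's binding type from the
# name of its first declared argument: no arguments -> 'static', a leading
# 'self' -> instance method, anything else is treated as a class method. A
# compact sketch of the same rule using the stdlib inspect module (the class
# and function names here are made up for illustration):

import inspect

def classify(fn) -> str:
    args = inspect.getfullargspec(fn).args
    if not args:
        return 'static'
    return 'self' if args[0] == 'self' else 'class'

class Demo:
    @staticmethod
    def s(): ...
    def m(self): ...
    @classmethod
    def k(cls): ...

# classify(Demo.s) -> 'static', classify(Demo.m) -> 'self', classify(Demo.k) -> 'class'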
@classmethod - def classify_methods(cls, obj= None): - obj = obj or cls + def classify_fns(cls, obj= None, mode=None): method_type_map = {} + obj = obj or c.module(obj) + if isinstance(obj, str): + obj = c.module(obj) for attr_name in dir(obj): method_type = None try: - method_type = cls.classify_method(getattr(obj, attr_name)) + method_type = cls.classify_fn(getattr(obj, attr_name)) except Exception as e: continue if method_type not in method_type_map: method_type_map[method_type] = [] method_type_map[method_type].append(attr_name) - + if mode != None: + method_type_map = method_type_map[mode] return method_type_map - @classmethod def get_function_args(cls, fn): - fn = cls.get_fn(fn) + if not callable(fn): + fn = cls.get_fn(fn) args = inspect.getfullargspec(fn).args return args + + + @classmethod + def has_function_arg(cls, fn, arg:str): + args = cls.get_function_args(fn) + return arg in args fn_args = get_fn_args = get_function_args @classmethod - def classify_method(cls, fn): - fn = cls.get_fn(fn) + def classify_fn(cls, fn): + + if not callable(fn): + fn = cls.get_fn(fn) + if not callable(fn): + return None args = cls.get_function_args(fn) if len(args) == 0: return 'static' @@ -6675,6 +5978,16 @@ def classify_method(cls, fn): return 'self' else: return 'class' + + def fn2type(self): + fn2type = {} + fns = self.fns() + for f in fns: + if callable(getattr(self, f)): + fn2type[f] = self.classify_fn(getattr(self, f)) + return fn2type + + @classmethod def build(cls, *args, **kwargs): @@ -6712,15 +6025,15 @@ def ps(cls, *args, **kwargs): @staticmethod def get_parents(obj) -> List[str]: - cls = resolve_class(obj) + cls = c.resolve_class(obj) return list(cls.__mro__[1:-1]) @staticmethod def get_parent_functions(cls) -> List[str]: - parent_classes = get_parents(cls) + parent_classes = c.get_parents(cls) function_list = [] for parent in parent_classes: - function_list += get_functions(parent) + function_list += c.get_functions(parent) return list(set(function_list)) @@ -6729,9 +6042,14 @@ def is_property(cls, fn: 'Callable') -> bool: ''' is the function a property ''' - fn = cls.get_fn(fn) + try: + fn = cls.get_fn(fn, ignore_module_pattern=True) + except : + return False + return isinstance(fn, property) + @classmethod def property_fns(cls) -> bool: ''' @@ -6741,6 +6059,7 @@ def property_fns(cls) -> bool: @classmethod def get_functions(cls, obj: Any = None, + search = None, include_parents:bool=False, include_hidden:bool = False) -> List[str]: ''' @@ -6768,18 +6087,20 @@ def get_functions(cls, obj: Any = None, dir_list = obj.__dict__.keys() for fn_name in dir_list: + if search != None and search not in fn_name: + continue + fn_obj = getattr(obj, fn_name) + if not callable(fn_obj): continue - # skip hidden functions if include_hidden is False - if (include_hidden==False) and (fn_name.startswith('__') and fn_name.endswith('__')): - + if (not include_hidden) and ((fn_name.startswith('__') or fn_name.endswith('_'))): if fn_name != '__init__': continue - + # if the function is in the parent class, skip it - if (fn_name in parent_functions) and (include_parents==False): + if (fn_name in parent_functions) and (not include_parents): continue # if the function is a property, skip it @@ -6790,43 +6111,40 @@ def get_functions(cls, obj: Any = None, # if the function is callable, include it if callable(getattr(obj, fn_name)): functions.append(fn_name) - - - return functions + functions = list(set(functions)) + + return functions + + @classmethod - def get_class_methods(cls: Union[str, type], obj = None)-> 
List[str]: + def self_fns(cls: Union[str, type], obj=None): ''' - Gets the class methods in a class + Gets the self methods in a class ''' - if obj is None: - obj = cls - - functions = c.get_functions(cls) - signature_map = {} - for f in functions: - if f.startswith('__'): - continue - signature_map[f] = cls.get_function_args(getattr(cls, f)) - - return [k for k, v in signature_map.items() if 'self' not in v] + obj = obj or cls + functions = c.get_functions(obj) + signature_map = {f:cls.get_function_args(getattr(obj, f)) for f in functions} + return [k for k, v in signature_map.items() if 'self' in v] + + self_functions = get_self_functions = self_fns @classmethod - def get_self_methods(cls: Union[str, type], obj=None): + def class_functions(cls: Union[str, type], obj=None): ''' Gets the self methods in a class ''' obj = obj or cls functions = c.get_functions(obj) signature_map = {f:cls.get_function_args(getattr(obj, f)) for f in functions} - return [k for k, v in signature_map.items() if 'self' in v] + return [k for k, v in signature_map.items() if 'cls' in v] - self_methods = self_fns = get_self_methods + class_methods = get_class_methods = class_fns = class_functions @classmethod - def get_static_methods(cls: Union[str, type], obj=None): + def static_functions(cls: Union[str, type], obj=None): ''' Gets the self methods in a class ''' @@ -6835,32 +6153,48 @@ def get_static_methods(cls: Union[str, type], obj=None): signature_map = {f:cls.get_function_args(getattr(obj, f)) for f in functions} return [k for k, v in signature_map.items() if not ('self' in v or 'cls' in v)] - static_meethods = static_fns = get_static_methods + static_methods = static_fns = static_functions @classmethod - def get_method_type(cls, fn): - return cls.get_function_signature( fn) - - - @classmethod - def get_function_signature(cls, fn) -> dict: + def fn_signature(cls, fn) -> dict: ''' get the signature of a function ''' if isinstance(fn, str): fn = getattr(cls, fn) - - import inspect return dict(inspect.signature(fn)._parameters) + + get_function_signature = fn_signature + @classmethod + def is_arg_key_valid(cls, key='config', fn='__init__'): + fn_signature = cls.fn_signature(fn) + if key in fn_signature: + return True + else: + for param_info in fn_signature.values(): + if param_info.kind._name_ == 'VAR_KEYWORD': + return True + + return False + + @classmethod + def has_var_keyword(cls, fn='__init__', fn_signature=None): + if fn_signature == None: + fn_signature = cls.resolve_fn(fn) + for param_info in fn_signature.values(): + if param_info.kind._name_ == 'VAR_KEYWORD': + return True + return False @staticmethod def get_function_input_variables(fn)-> dict: - return list(c.get_function_signature(fn).keys()) + return list(c.resolve_fn(fn).keys()) @classmethod - def get_function_defaults(cls, fn): - import inspect - + def fn_defaults(cls, fn): + """ + Gets the function defaults + """ fn = cls.get_fn(fn) function_defaults = dict(inspect.signature(fn)._parameters) for k,v in function_defaults.items(): @@ -6870,10 +6204,7 @@ def get_function_defaults(cls, fn): function_defaults[k] = None return function_defaults - - - - + @staticmethod def is_class(obj): ''' @@ -6919,21 +6250,6 @@ def has_fn(cls,fn_name, obj = None): if obj == None: obj = cls return callable(getattr(obj, fn_name, None)) - - @staticmethod - def try_fn_n_times(fn, kwargs:Dict, try_count_limit: int = 10): - ''' - try a function n times - ''' - try_count = 0 - return_output = None - while try_count < try_count_limit: - try: - return_output = fn(**kwargs) - 
break - except RuntimeError: - try_count += 1 - return return_output @classmethod @@ -6941,19 +6257,10 @@ def jload(cls, json_string): import json return json.loads(json_string.replace("'", '"')) - @classmethod - def bro(cls, x): - return x - - - @classmethod - def giturl(cls): - return c.cmd('git remote -v', verbose=False).split('\n')[0].split('\t')[1].split(' ')[0] - url = giturl - @classmethod def my_modules(cls, *args, **kwargs): return c.module('subspace')().my_modules(*args, **kwargs) + @classmethod def my_stake(cls, *args, **kwargs): return c.module('subspace')().my_stake(*args, **kwargs) @@ -6967,11 +6274,22 @@ def my_stakefrom(cls, *args, **kwargs): return c.module('subspace')().my_stakefrom(*args, **kwargs) @classmethod - def my_tokens(cls, *args, **kwargs): - return c.module('subspace')().my_tokens(*args, **kwargs) + def my_value(cls, *args, **kwargs): + return c.module('subspace')().my_value(*args, **kwargs) + + @classmethod + def get_value(cls, *args, **kwargs): + return c.module('subspace')().get_value(*args, **kwargs) - my_value = my_tokens + @classmethod + def get_stake_to(cls, *args, **kwargs): + return c.module('subspace')().get_stake_to(*args, **kwargs) + + @classmethod + def get_stake_from(cls, *args, **kwargs): + return c.module('subspace')().get_stake_from(*args, **kwargs) + @classmethod def partial(cls, fn, *args, **kwargs): from functools import partial @@ -6981,7 +6299,6 @@ def partial(cls, fn, *args, **kwargs): @staticmethod def sizeof( obj): import sys - type_str = c.type_str(obj) sizeof = 0 if isinstance(obj, dict): for k,v in obj.items(): @@ -6996,12 +6313,21 @@ def sizeof( obj): sizeof += sys.getsizeof(obj) return sizeof - + @classmethod - def code(cls, module = None, *args, **kwargs): + def filesize(cls, filepath:str): + filepath = cls.resolve_path(filepath) + return os.path.getsize(filepath) + @classmethod + def code(cls, module = None, search=None, *args, **kwargs): + if '/' in str(module) or module in cls.fns(): + return c.fn_code(module) + module = cls.resolve_module(module) - path = module.pypath() text = c.get_text( module.pypath(), *args, **kwargs) + if search != None: + find_lines = c.find_lines(text=text, search=search) + return find_lines return text @@ -7009,12 +6335,18 @@ def code(cls, module = None, *args, **kwargs): def get_text_line(cls, module = None, *args, **kwargs): module = cls.resolve_module(module) return c.get_text_line( module.pypath(), *args, **kwargs) + + pycode = code + @classmethod - def codehash(cls, *args, **kwargs): + def chash(cls, *args, **kwargs): + """ + The hash of the code, where the code is the code of the class (cls) + """ code = cls.code(*args, **kwargs) return c.hash(code) - chash = pyhash = codehash + @classmethod def match_module_hash(cls, hash:str, module:str=None, *args, **kwargs): ''' @@ -7025,7 +6357,7 @@ def match_module_hash(cls, hash:str, module:str=None, *args, **kwargs): module = c.module(module) else: module = cls - return module.script_hash(*args, **kwargs) == hash + return module.chash(*args, **kwargs) == hash @classmethod def find_code_line(cls, search:str, code:str = None): @@ -7034,23 +6366,144 @@ def find_code_line(cls, search:str, code:str = None): found_lines = [] # list of found lines for i, line in enumerate(code.split('\n')): if search in line: - found_lines.append({'idx': i, 'text': line}) + found_lines.append({'idx': i+1, 'text': line}) if len(found_lines) == 0: return None elif len(found_lines) == 1: return found_lines[0]['idx'] return found_lines + @classmethod + def fn_info(cls, 
fn:str='test_fn') -> dict: + r = {} + code = cls.fn_code(fn) + lines = code.split('\n') + mode = 'self' + if '@classmethod' in lines[0]: + mode = 'class' + elif '@staticmethod' in lines[0]: + mode = 'static' + start_line_text = None + lines_before_fn_def = 0 + for l in lines: + + if f'def {fn}('.replace(' ', '') in l.replace(' ', ''): + start_line_text = l + break + else: + lines_before_fn_def += 1 + + assert start_line_text != None, f'Could not find function {fn} in {cls.pypath()}' + module_code = cls.code() + start_line = cls.find_code_line(start_line_text, code=module_code) - lines_before_fn_def - 1 + end_line = start_line + len(lines) # find the endline + has_docs = bool('"""' in code or "'''" in code) + filepath = cls.filepath() + + # start code line + for i, line in enumerate(lines): + + is_end = bool(')' in line and ':' in line) + if is_end: + start_code_line = i + break + + + return { + 'start_line': start_line, + 'end_line': end_line, + 'has_docs': has_docs, + 'code': code, + 'n_lines': len(lines), + 'hash': c.hash(code), + 'path': filepath, + 'start_code_line': start_code_line + start_line , + 'mode': mode + + } - def ensure_self_attr(self, attr, default=None): - if not hasattr(self, attr): - setattr(self, attr, default) + + @classmethod + def set_line(cls, idx:int, text:str): + code = cls.code() + lines = code.split('\n') + if '\n' in text: + front_lines = lines[:idx] + back_lines = lines[idx:] + new_lines = text.split('\n') + c.print(new_lines) + lines = front_lines + new_lines + back_lines + else: + lines[idx-1] = text + new_code = '\n'.join(lines) + cls.put_text(cls.filepath(), new_code) + return {'success': True, 'msg': f'Set line {idx} to {text}'} + + @classmethod + def add_line(cls, idx=0, text:str = '', module=None ): + ''' + ### Documentation + + #### `add_line` Method + + **Description:** + + The `add_line` method is a class method that allows you to insert one or multiple lines of text at a specified index in the code of a file or module. If no module is provided, it defaults to modifying the code of the class itself. + + **Parameters:** + + - `idx` (optional): The index (line number) at which the new text should be inserted. Default is `0`. + - `text` (optional): A string representing the new line(s) of text to be added. If multiple lines are provided, they should be separated by '\n'. Default is an empty string `''`. + - `module` (optional): The module whose code should be modified. If `None`, the class's own code is modified. + + **Returns:** + + A dictionary with two key-value pairs: + - `'success'`: A boolean value indicating the success of the operation. + - `'msg'`: A formatted string message indicating the line number and text that was added. + + **Usage:** + + ```python + result = ClassName.add_line(idx=5, text="New line of code", module='some_module') + print(result) + # Output: {'success': True, 'msg': 'Added line 5 to New line of code'} + ``` + + **Notes:** + + - The method accesses and modifies the code by converting it into a list of lines. + - After inserting the new lines of text, the modified code is joined back into a single string and updated within the file or module. + - The method assumes that the class contains `code`, `put_text`, and `filepath` methods which are responsible for retrieving the current code, updating the text in the file, and providing the file path respectively. + + --- + + Developers should ensure that the index provided is within the bounds of the code line count to avoid any errors. 
The method does not perform any syntax or error-checking on the new lines of text to be added, so developers should ensure the text is valid code before insertion. + ''' + + code = cls.code() if module == None else c.module(module).code() + lines = code.split('\n') + new_lines = text.split('\n') if '\n' in text else [text] + lines = lines[:idx] + new_lines + lines[idx:] + new_code = '\n'.join(lines) + cls.put_text(cls.filepath(), new_code) + return {'success': True, 'msg': f'Added line {idx} to {text}'} + @classmethod - def ensure_class_attr(cls, attr, default=None): - if not hasattr(cls, attr): - setattr(cls, attr, default) + def get_line(cls, idx): + + + code = cls.code() + lines = code.split('\n') + assert idx < len(lines), f'idx {idx} is out of range for {len(lines)}' + line = lines[max(idx, 0)] + c.print(len(line)) + return line + + + tokenizer_cache = {} @classmethod def tokenizer(cls, tokenizer='gpt2', cache = True, **kwargs): @@ -7072,19 +6525,20 @@ def detokenize(cls, tokens, tokenizer='gpt2', *args, **kwargs): @classmethod def num_tokens(cls, text, **kwargs): return len(cls.tokenize(text, **kwargs)) - + @staticmethod + def num_words( text): + return len(text.split(' ')) + def generate_completions(self, past_tokens = 10, future_tokens = 10, tokenizer:str='gpt2', mode:str='lines', **kwargs): code = self.code() code_lines = code.split('\n') - c.tokenizer() if mode == 'lines': code_lines else: raise ValueError(f'unknown mode {mode}') return - ## SUBSPACE FNS @classmethod def is_registered(cls, *args, **kwargs): @@ -7094,6 +6548,27 @@ def is_registered(cls, *args, **kwargs): def transfer(cls, *args, **kwargs): return c.module('subspace')().transfer(*args, **kwargs) + @classmethod + def is_repo(cls, path='.'): + return c.module('repo')().is_repo(path) + + @classmethod + def staked(cls, *args, **kwargs): + return c.module('subspace')().staked(*args, **kwargs) + + @classmethod + def stake_transfer(cls, *args, **kwargs): + return c.module('subspace')().stake_transfer(*args, **kwargs) + + @classmethod + def add_profit_shares(cls, *args, **kwargs): + return c.module('subspace')().add_profit_shares(*args, **kwargs) + + @classmethod + def profit_shares(cls, *args, **kwargs): + return c.module('subspace')().profit_shares(*args, **kwargs) + + send = transfer @classmethod @@ -7103,24 +6578,77 @@ def block(self, *args, **kwargs): @classmethod def total_supply(self, *args, **kwargs): return c.module('subspace')().total_supply(*args, **kwargs) + + @classmethod def update_module(cls, *args, **kwargs): return c.module('subspace')().update_module(*args, **kwargs) - - def update_servers(self, *args, **kwargs): - subspace = c.module('subspace')() - for name, address in c.namespace(network='localf').items(): - subspace.update_module(name, address) - return subspacec @classmethod - def vote(cls, *args, **kwargs): + def set_weights(cls, *args, **kwargs): return c.module('subspace')().vote(*args, **kwargs) + vote = set_weights + @classmethod + def set_weights(cls, *args, **kwargs): + return c.module('subspace')().vote(*args, **kwargs) + + @classmethod + def vote_loop(cls, *args, **kwargs): + return c.module('vali.parity').vote_loop(*args, **kwargs) + + voteloop = vote_loop + + + + @classmethod + def self_vote(cls, *args, **kwargs): + return c.module('subspace')().self_vote(*args, **kwargs) + @classmethod + def self_vote_pool(cls, *args, **kwargs): + return c.module('subspace')().self_vote_pool(*args, **kwargs) @classmethod def stake(cls, *args, **kwargs): return c.module('subspace')().stake(*args, **kwargs) 
+
+    @classmethod
+    def my_stake_from(cls, *args, **kwargs):
+        return c.module('subspace')().my_stake_from(*args, **kwargs)
+
+    @classmethod
+    def my_stake_to(cls, *args, **kwargs):
+        return c.module('subspace')().my_stake_to(*args, **kwargs)
+
+    @classmethod
+    def stake_many(cls, *args, **kwargs):
+        return c.module('subspace')().stake_many(*args, **kwargs)
+
+    @classmethod
+    def transfer_many(cls, *args, **kwargs):
+        return c.module('subspace')().transfer_many(*args, **kwargs)
+
+    @classmethod
+    def random_word(cls, *args, n=1, seperator='_', **kwargs):
+        # take the first n words of a freshly generated mnemonic
+        random_words = c.module('key').generate_mnemonic(*args, **kwargs).split(' ')[:n]
+        if n == 1:
+            return random_words[0]
+        return seperator.join(random_words)
+
+    @classmethod
+    def random_words(cls, n=2, **kwargs):
+        return c.module('key').generate_mnemonic(n=n, **kwargs)
+
+    @classmethod
+    def unstake_many(cls, *args, **kwargs):
+        return c.module('subspace')().unstake_many(*args, **kwargs)
+
+    unstake_all = unstake_many
+
+    @classmethod
+    def repo_url(cls, *args, **kwargs):
+        return c.module('git').repo_url(*args, **kwargs)
+
     @classmethod
     def get_stake(cls, *args, **kwargs):
         return c.module('subspace')().get_stake(*args, **kwargs)
@@ -7165,11 +6693,21 @@ def my_keys(cls, *args, **kwargs):
     @classmethod
     def balance(cls, *args, **kwargs):
         return c.module('subspace')().balance(*args, **kwargs)
-    get_balance = balance
-    @classmethod
-    def my_balances(cls, *args, **kwargs):
-        return c.module('subspace')().my_balances(*args, **kwargs)
+    @classmethod
+    def balances(cls, *args, **kwargs):
+        return c.module('subspace')().balance(*args, **kwargs)
+
+    @classmethod
+    def get_balance(cls, *args, **kwargs):
+        return c.module('subspace')().get_balance(*args, **kwargs)
+
+    @classmethod
+    def get_balances(cls, *args, **kwargs):
+        return c.module('subspace')().get_balances(*args, **kwargs)
+
+    @classmethod
+    def key2balances(cls, *args, **kwargs):
+        return c.module('subspace')().key2balances(*args, **kwargs)

     @classmethod
     def my_keys(cls, *args, **kwargs):
@@ -7179,27 +6717,21 @@ def my_keys(cls, *args, **kwargs):
     def key_info(cls, *args, **kwargs):
         return c.module('key').key_info(*args, **kwargs)
+
     @classmethod
     def key2mem(cls, *args, **kwargs):
         return c.module('key').key2mem(*args, **kwargs)

     @classmethod
     def key_info_map(cls, *args, **kwargs):
         return c.module('key').key_info_map(*args, **kwargs)
-
-    @property
-    def key(self):
-        if not hasattr(self, '_key'):
-            self._key = c.get_key(self.server_name, create_if_not_exists=True)
-        return self._key
+
-    @staticmethod
-    def is_valid_ss58_address(address:str):
-        return c.module('key').is_valid_ss58_address(address)
-    @key.setter
-    def key(self, key):
-        self._key = c.get_key(key)
-        return self._key
+    @staticmethod
+    def valid_ss58_address(address:str):
+        return c.module('key').valid_ss58_address(str(address))
+    is_valid_ss58_address = valid_ss58_address

     @classmethod
     def node_keys(cls, *args, **kwargs):
@@ -7214,55 +6746,6 @@ def add_node_key(cls, *args, **kwargs):
         return c.module('subspace').add_node_key(*args, **kwargs)

-    @classmethod
-    def infer_device_map(cls,
-                         model:str,
-                         max_memory: dict = None,
-                         block_prefix : str = 'model.layers',
-                         buffer_memory:float = '1gb', # 10GB buffer (bytes)
-                         quantize:str = None, #
-                         verbose: bool = False,
-                         **kwargs,
-                         ):
-        if quantize in ['int8']:
-            quantize_factor = 0.5
-        elif quantize in ['int4']:
-            quantize_factor = 0.25
-        elif quantize == None:
-            quantize_factor = 1
-        model = c.resolve_model(model)
-        param_size_map = 
c.params_size_map(model, block_prefix=block_prefix, **kwargs) - - free_gpu_memory = c.free_gpu_memory() if max_memory == None else max_memory - buffer_memory = c.resolve_memory(buffer_memory) - device_map = {} - gpu = None - gpu_memory = 0 - unallocated_memory = sum(param_size_map.values()) - allocated_gpu_memory = {} - - gpu = None - - - - for param_key, param_size in param_size_map.items(): - # find the most free gpu if gpu is None or if the gpu has less memory than the buffer memory - - if (gpu == None) or (free_gpu_memory[gpu] < buffer_memory) or (free_gpu_memory[gpu] < param_size): - gpu = c.most_free_gpu( fmt='b', free_gpu_memory=free_gpu_memory) - allocated_gpu_memory[gpu] = 0 - - allocated_gpu_memory[gpu] += param_size - free_gpu_memory[gpu] -= param_size - unallocated_memory -= param_size - device_map[param_key] = gpu - - c.print(allocated_gpu_memory, c.free_gpu_memory()) - assert unallocated_memory == 0, f'unallocated memory {unallocated_memory} != 0' - - return device_map - - @classmethod def snap(cls, *args, **kwargs): return c.module('subspace')().snap(*args, **kwargs) @@ -7278,7 +6761,7 @@ def key2value(self, *args, **kwargs): return c.module('subspace')().key2value( *args, **kwargs) def key2stake(self, *args, **kwargs): - return c.module('subspace')().key2balance( *args, **kwargs) + return c.module('subspace')().key2stake( *args, **kwargs) def live_keys(self, *args, **kwargs): return c.module('subspace')().live_keys( *args, **kwargs) @@ -7287,8 +6770,8 @@ def dead_keys(self, *args, **kwargs): @classmethod - def my_balance(cls, *args, **kwargs): - return c.module('subspace')().my_balance(*args, **kwargs) + def key2balance(cls, *args, **kwargs): + return c.module('subspace')().key2balance(*args, **kwargs) @classmethod def nodes(cls, *args, **kwargs): @@ -7303,13 +6786,15 @@ def cj(cls, *args, **kwargs): return c.module('subspace')().cj(*args, **kwargs) j = cj - @classmethod - def watchdog(cls, *args, **kwargs): - return c.module('subspace')().watchdog(*args, **kwargs) - watch = watchdog + + @classmethod def n(self, *args, **kwargs): return c.module('subspace')().n(*args, **kwargs) + + @classmethod + def query_map(self, *args, **kwargs): + return c.module('subspace')().query_map(*args, **kwargs) @classmethod def upgrade_proto(cls, verbose:bool = True): @@ -7345,10 +6830,33 @@ def unregistered_servers(cls, *args, **kwargs): def subnets(cls, *args, **kwargs): return c.module('subspace')().subnets(*args, **kwargs) + @classmethod + def subnet2netuid(cls, *args, **kwargs): + return c.module('subspace')().subnet2netuid(*args, **kwargs) + + @classmethod + def netuid2subnet(cls, *args, **kwargs): + return c.module('subspace')().netuid2subnet(*args, **kwargs) + + @classmethod def subnet(cls, *args, **kwargs): return c.module('subspace')().subnet(*args, **kwargs) + @classmethod + def netuids(cls, *args, **kwargs): + return c.module('subspace')().netuids(*args, **kwargs) + + def glob_hash(self, path=None, **kwargs): + + glob_dict = c.glob(path, **kwargs) + glob_hash = c.hash(glob_dict) + c.put('glob_hash', glob_hash) + + @classmethod + def my_subnets(cls, *args, **kwargs): + return c.module('subspace')().my_subnets(*args, **kwargs) + @classmethod def networth(cls, *args, **kwargs): return c.module('subspace')().networth(*args, **kwargs) @@ -7362,26 +6870,16 @@ def key2tokens(cls, *args, **kwargs): return c.module('subspace')().key2tokens(*args, **kwargs) @classmethod def key2stake(cls, *args, **kwargs): - return c.module('subspace')().key2tokens(*args, **kwargs) - - @classmethod - def 
key2stake(cls, *args, **kwargs): - return c.module('subspace')().key2tokens(*args, **kwargs) - - - - - @classmethod - def build_proto(cls, *args, **kwargs): - src_dir = c.root_path + '/module/server/proto' - proto_path = src_dir + '/server.proto' - cmd = f"python3 -m grpc.tools.protoc {proto_path} -I {src_dir} --python_out={src_dir} --grpc_python_out={src_dir}" - c.cmd(cmd, verbose=True) + return c.module('subspace')().key2stake(*args, **kwargs) @classmethod def update_network(cls, *args, **kwargs): return c.module('subspace')().update_network(*args, **kwargs) + @classmethod + def update_global(cls, *args, **kwargs): + return c.module('subspace')().update_global(*args, **kwargs) + @classmethod def market_cap(cls, *args, **kwargs): return c.module('subspace')().market_cap(*args, **kwargs) @@ -7391,22 +6889,57 @@ def n(cls, *args, **kwargs): return c.module('subspace')().n(*args, **kwargs) @classmethod def stats(cls, *args, **kwargs): - t = c.timer() return c.module('subspace')().stats(*args, **kwargs) @classmethod - def vali_stats(cls, *args, **kwargs): - return c.module('vali').vali_stats(*args, **kwargs) - vstats = vali_stats - + def vstats(cls, *args, **kwargs): + return c.module('vali').all_stats(*args, **kwargs) + @classmethod + def valis(cls, network=None): + return c.servers('vali', network=network) + @classmethod def check_valis(cls, *args, **kwargs): - return c.module('vali').check_valis(*args, **kwargs) + return c.module('subspace')().check_valis(*args, **kwargs) @classmethod def check_servers(cls, *args, **kwargs): return c.module('subspace')().check_servers(*args, **kwargs) + @classmethod + def scan(cls, + search=None, + max_futures:int=100, + network='local', + update=False, + schema=True, + namespace=True, + hardware=True, + **kwargs): + + infos = {} + namespace = c.namespace(search=search, network=network, update=update) + futures = [] + name2future = {} + for name, address in namespace.items(): + future = [c.submit(c.call, kwargs={'fn': 'info', 'hardware': hardware, 'schema': schema }, module='subspace')] + name2future[name] = future + futures = list(name2future.values()) + if len(name2future) >= max_futures: + for f in c.as_completed(futures): + name2future.pop(f) + result = f.result() + c.print(result) + if 'error' not in result: + infos[name] = result + + + + + + + + def my_stats(self, *args, **kwargs): return c.module('subspace')().my_stats(*args, **kwargs) @@ -7426,61 +6959,41 @@ def add_shortcut(cls, shortcut, name) -> Dict[str, str]: if name in name2shortcut: del shortcuts[name2shortcut[name]] shortcuts[shortcut] = name - c.putc('shortcuts', shortcuts) + cls.putc('shortcuts', shortcuts) return {'success': True, 'msg': f'added shortcut ({shortcut} -> {name})'} @classmethod def resolve_shortcut(cls, name:str) -> str: return c.getc('shortcuts').get(name, name) - @classmethod - def model_menu(cls): - return c.model_shortcuts() - - @classmethod - def talk(cls , *args, module = 'model', num_jobs=1, timeout=6, **kwargs): - jobs = [] - for i in range(num_jobs): - model = c.connect(module, virtual=False) - c.print('Selecting: ', model) - job = model.async_forward(fn='talk', args=args, kwargs=kwargs) - jobs += [job] - - results = c.gather(jobs, timeout=timeout) - for r in results: - if c.is_success(r): - if isinstance(r, str) and len(r) > 0: - return r - - return 'Im sorry I dont know how to respond to that, can you rephrase that?' 
- chat = talk - def x(self, y=1): - c.print('fam', y) + @classmethod + def talk(cls, *args, **kwargs): + return c.module('model.openrouter')().talk(*args, **kwargs) @classmethod - def ask(cls, *args, **kwargs): - return c.module('model.hf').talk(*args, **kwargs) + def yesno(self, prompt:str): + return c.module('model.openrouter')().talk(f"{prompt} give a yes or no response ONLY IN ONE WORD", max_tokens=10) + ask = a = talk @classmethod def containers(cls): return c.module('docker').containers() @staticmethod - def chunk(sequence:list = [0,2,3,4,5,6,67,], - chunk_size:int=None, + def chunk(sequence:list = [0,2,3,4,5,6,6,7], + chunk_size:int=4, num_chunks:int= None): assert chunk_size != None or num_chunks != None, 'must specify chunk_size or num_chunks' if chunk_size == None: - chunk_size = len(sequence) // num_chunks - + chunk_size = len(sequence) / num_chunks if chunk_size > len(sequence): return [sequence] if num_chunks == None: - num_chunks = len(sequence) // chunk_size - - + num_chunks = int(len(sequence) / chunk_size) + if num_chunks == 0: + num_chunks = 1 chunks = [[] for i in range(num_chunks)] for i, element in enumerate(sequence): idx = i % num_chunks @@ -7496,16 +7009,16 @@ def chmod_scripts(cls): def install_docker_gpus(self): self.chmod_scripts() - c.cmd('./scripts/nvidia_docker_setup.sh', cwd=self.libpath, verbose=True, bash=True) + c.cmd(f'{c.libpath}/scripts/nvidia_docker_setup.sh', cwd=self.libpath, verbose=True, bash=True) def install_docker(self): self.chmod_scripts() - c.cmd('./scripts/install_docker.sh', cwd=self.libpath, verbose=True, bash=True) + c.cmd(f'{c.libpath}/scripts/install_docker.sh', cwd=self.libpath, verbose=True, bash=True) @classmethod def install_rust(cls, sudo=True) : cls.chmod_scripts() - c.cmd('./scripts/install_rust_env.sh', cwd=cls.libpath, verbose=True, bash=True, sudo=sudo) + c.cmd(f'{c.libpath}/scripts/install_rust_env.sh', cwd=cls.libpath, verbose=True, bash=True, sudo=sudo) @classmethod def install_npm(cls, sudo=False) : @@ -7519,6 +7032,11 @@ def install_pm2(cls, sudo=True) : def install_python(cls, sudo=True) : c.cmd('apt install -y python3-dev python3-pip', verbose=True, bash=True, sudo=sudo) + def cancel(self, futures): + for f in futures: + f.cancel() + return {'success': True, 'msg': 'cancelled futures'} + @classmethod def cachefn(cls, func, max_age=60, update=False, cache=True, cache_folder='cachefn'): import functools @@ -7531,7 +7049,7 @@ def wrapper(*args, **kwargs): if not update: - result = cls.get(fn_name, default=None, **cache_params) + result = cls.get(fn_name, **cache_params) if result != None: return result @@ -7555,33 +7073,38 @@ def ss58_decode(cls, data:Union[str, bytes],**kwargs): from scalecodec.utils.ss58 import ss58_decode return ss58_decode(data, **kwargs) - - @classmethod - def random_tmp_file_path(cls, prefix='randomtempfile_utc'): - return f"/tmp/{prefix}{c.time()}" - @classmethod def name2compose(self, **kwargs): return c.module('docker').name2compose(**kwargs) - - @classmethod - def generator(cls): - for i in range(10): + def generator(cls, n=10): + for i in range(n): yield i @classmethod def run_generator(cls): + """ + """ for i in cls.generator(): c.print(i) + + @classmethod def is_generator(cls, obj): - import inspect - return inspect.isgenerator(obj) + """ + Is this shiz a generator dawg? 
+ """ + if isinstance(obj, str): + if not hasattr(cls, obj): + return False + obj = getattr(cls, obj) + if not callable(obj): + result = inspect.isgenerator(obj) + else: + result = inspect.isgeneratorfunction(obj) + return result - - @classmethod def module2docpath(cls): tree = c.tree() @@ -7606,20 +7129,18 @@ def module2docpath(cls): def hello(cls): c.print('hello') - - - - thread_map = {} @classmethod def thread(cls,fn: Union['callable', str], args:list = None, kwargs:dict = None, daemon:bool = True, + name = None, tag = None, start:bool = True, - tag_seperator:str=':'): - + tag_seperator:str='::', + **extra_kwargs): + if isinstance(fn, str): fn = c.get_fn(fn) if args == None: @@ -7631,40 +7152,45 @@ def thread(cls,fn: Union['callable', str], assert isinstance(args, list), f'args must be a list, got {args}' assert isinstance(kwargs, dict), f'kwargs must be a dict, got {kwargs}' - import threading - t = threading.Thread(target=fn, args=args, kwargs=kwargs) - t.__dict__['time'] = c.time() + if name == None: + name = fn.__name__ + cnt = 0 + while name in cls.thread_map: + cnt += 1 + if tag == None: + tag = '' + name = name + tag_seperator + tag + str(cnt) + + t = threading.Thread(target=fn, args=args, kwargs=kwargs, **extra_kwargs) + + # set the time it starts + setattr(t, 'start_time', c.time()) t.daemon = daemon if start: t.start() - fn_name = fn.__name__ - if tag == None: - tag = '' - else: - tag = str(tag) - name = fn_name + tag_seperator + tag - cnt = 0 - while name in cls.thread_map: - cnt += 1 - name = fn_name + tag_seperator + tag + str(cnt) - cls.thread_map[name] = t - return t @classmethod def join_threads(cls, threads:[str, list]): - threads = self.thread_map + threads = cls.thread_map for t in threads.values(): # throw error if thread is not in thread_map t.join() + return {'success': True, 'msg': 'all threads joined', 'threads': threads} @classmethod - def threads(cls, *args, **kwargs): - return list(cls.thread_map(*args, **kwargs).keys()) + def threads(cls, search:str=None, **kwargs): + threads = list(cls.thread_map.keys()) + if search != None: + threads = [t for t in threads if search in t] + return threads @classmethod + def thread_count(cls): + return threading.active_count() + @classmethod def resolve_key_address(cls, key): key2address = c.key2address() if key in key2address: @@ -7682,11 +7208,12 @@ def add_user(cls, address, role='user', **kwargs): @classmethod def users(cls, *args, **kwargs): return c.module('user').user(*args, **kwargs) + @classmethod - def is_user(cls, address): - return c.module('user').is_user(address) + def role2users(cls, *args, **kwargs): + return c.module('user')().role2users(*args, **kwargs) @classmethod - def is_user(self, address): + def is_user(cls, address): return c.module('user').is_user(address) @classmethod def get_user(cls, address): @@ -7707,8 +7234,8 @@ def user_exists(cls, address): def is_root_key(cls, address:str)-> str: return address == c.root_key().ss58_address @classmethod - def is_admin(cls, *args, **kwargs): - return c.module('user').is_admin(*args, **kwargs) + def is_admin(cls, address:str): + return c.module('user').is_admin(address=address) @classmethod def admins(cls): return c.module('user').admins() @@ -7737,37 +7264,12 @@ def restart_replicas(cls, network:str=None, **kwargs): for m in cls.replicas(network=network, **kwargs): c.print(m) c.restart(m) + + @classmethod - def kill_replicas(self, network:str=None, **kwargs): - for m in cls.replicas(network=network, **kwargs): - c.kill(m) - - @property - def 
access_module(self): - - ''' - Get the auth modules (modules that process the message to authrize the right people) - ''' - if not hasattr(self, '_access_module'): - # each module has a verify function, that takes in the input and returns the input - access_config = self.config.get('access_module', c.config()['access_module']) - - # sets self._access_module - self.set_access_module(**access_config) - return self._access_module - - default_access_module='access' - def set_access_module(self, refresh=False, **access_config): - if hasattr(self, '_access_module'): - if not refresh: - return self._access_module - # each module has a verify function, that takes in the input and returns the input - access_config = {**self._access_module.config, **access_config} - # get the access module if specified - access_module_name = access_config.pop('module_name', self.default_access_module) - self._access_module = c.module(access_module_name)(module=self, **access_config) - return self._access_module + def getcwd(cls): + return os.getcwd() @classmethod def gc(cls): @@ -7782,101 +7284,21 @@ def __str__(self) -> str: @classmethod def emoji(cls, name:str): - emojis = [] - for k,v in c.emojis.items(): - if name in k: - emojis += [v] - - return c.choice(emojis) - - emojis = {'dank': '🔥', - 'error': '💥', - 'white': '🕊️', - 'cool': '😎', - 'success': '✨', - 'sad': '😢', - 'time': '🕒', - 'count': '🔢', - 'output': '📤', - 'input': '📥', - 'party': '🥳', - 'fireworks': '🎆', - 'explosion': '💣', - 'alien': '👽', - 'rocket': '🚀', - 'money': '💰', - 'victory': '✌️', - 'unicorn': '🦄', - 'rainbow': '🌈', - 'music': '🎵', - 'pizza': '🍕', - 'taco': '🌮', - 'sunglasses': '😎', - 'flame': '🔥', - 'diamond': '💎', - 'savage': '😈', - 'laughing': '😂', - 'ninja': '🥷', - 'skull': '💀', - 'thumbs_up': '👍', - 'thumbs_down': '👎', - 'crown': '👑', - 'cyber_eye': '👁️‍🗨️', - 'data_stream': '🌐', - 'brain': '🧠', - 'robot': '🤖', - 'lightning': '⚡', - 'heart': '❤️', - 'heartbreak': '💔', - 'heartpulse': '💗', - 'green_heart': '💚', - 'blue_heart': '💙', - 'purple_heart': '💜', - 'yellow_heart': '💛', - 'orange_heart': '🧡', - 'error': '💥', - 'cross': '❌', - 'check': '✅', - 'checkmark': '✅', - 'check_mark': '✅', - 'checkered_flag': '🏁', - 'warning': '⚠️', - 'warning_sign': f'⚠️', - 'question': '❓', - 'happy': '😀', - 'sad': '😢', - 'angry': '😠', - 'angry_face': '😠', - 'angry_face_with_horns': '👿', - 'devil': '😈', - 'red_circle': '🔴', - 'green_circle': '🟢', - 'blue_circle': '🔵', - 'yellow_circle': '🟡', - 'orange_circle': '🟠', - 'purple_circle': '🟣', - 'black_circle': '⚫', - 'white_circle': '⚪', - 'brown_circle': '🟤', - 'red_square': '🟥', - 'green_square': '🟩', - 'blue_square': '🟦', - 'yellow_square': '🟨', - 'orange_square': '🟧', - 'purple_square': '🟪', - 'black_square': '⬛', - 'white_square': '⬜', - 'brown_square': '🟫', + return c.module('emoji').emoji(name) + + @classmethod + def emojis(cls, search = None): + + emojis = c.module('emoji').emojis + if search != None: + emojis = {k:v for k,v in emojis.items() if search in k} + return - - } - - - @staticmethod def tqdm(*args, **kwargs): from tqdm import tqdm return tqdm(*args, **kwargs) + progress = tqdm # PEER LAND @classmethod @@ -7903,35 +7325,375 @@ def random_peers(cls, network:str='local', n=2, tag=None): return c.shuffle(peers)[:n] + + @classmethod + def type(cls,x ): + return type(x).__name_ + + ## API MANAGEMENT ## + + def set_api_key(self, api_key:str, cache:bool = True): + api_key = os.getenv(str(api_key), None) + if api_key == None: + api_key = self.get_api_key() + + + self.api_key = api_key + if cache: + 
            self.add_api_key(api_key)
+
+        assert isinstance(api_key, str)
+
+    @classmethod
+    def add_api_key(cls, api_key:str):
+        assert isinstance(api_key, str)
+        api_keys = cls.get('api_keys', [])
+        api_keys.append(api_key)
+        api_keys = list(set(api_keys))
+        cls.put('api_keys', api_keys)
+        return {'api_keys': api_keys}
+
+    @classmethod
+    def add_api_keys(cls, *api_keys:str):
+        if len(api_keys) == 1 and isinstance(api_keys[0], list):
+            api_keys = api_keys[0]
+        # api_keys arrives as a tuple from *args, so cast it to a list before concatenating
+        api_keys = list(set(list(api_keys) + cls.get('api_keys', [])))
+        cls.put('api_keys', api_keys)
+        return {'api_keys': api_keys}
+
+    @classmethod
+    def set_api_keys(cls, api_keys:str):
+        api_keys = list(set(api_keys))
+        cls.put('api_keys', api_keys)
+        return {'api_keys': api_keys}
+
+    @classmethod
+    def rm_api_key(cls, api_key:str):
+        assert isinstance(api_key, str)
+        api_keys = c.get(cls.resolve_path('api_keys'), [])
+        for i in range(len(api_keys)):
+            if api_key == api_keys[i]:
+                api_keys.pop(i)
+                break
+        path = cls.resolve_path('api_keys')
+        c.put(path, api_keys)
+        return {'api_keys': api_keys}
+
+    @classmethod
+    def get_api_key(cls, module=None):
+        if module != None:
+            cls = c.module(module)
+        api_keys = cls.api_keys()
+        if len(api_keys) == 0:
+            return None
+        else:
+            return c.choice(api_keys)
+
+    @classmethod
+    def api_keys(cls):
+        return c.get(cls.resolve_path('api_keys'), [])
+
+    @classmethod
+    def rm_api_keys(cls):
+        c.put(cls.resolve_path('api_keys'), [])
+        return {'api_keys': []}
+
+    @classmethod
+    def send_api_keys(cls, module:str, network='local'):
+        api_keys = cls.api_keys()
+        assert len(api_keys) > 0, 'no api keys to send'
+        module = c.connect(module, network=network)
+        return module.add_api_keys(api_keys)
+
+    @classmethod
+    def loop(cls, interval=30, network=None, remote:bool=True, local:bool=True, save:bool=True):
+        # initialize the timer so the first iteration has a reference point
+        start_time = c.timestamp()
+        while True:
+            current_time = c.timestamp()
+            elapsed = current_time - start_time
+            if elapsed > interval:
+                c.print('SYNCING AND UPDATING THE SERVERS_INFO')
+                # subspace.sync(network=network, remote=remote, local=local, save=save)
+                start_time = current_time
+            c.sleep(interval)
+
+    @staticmethod
+    def get_pid():
+        return os.getpid()
+
+    @classmethod
+    def process_kwargs(cls, kwargs:dict, fn_schema:dict):
+
+        for k,v in kwargs.items():
+            if v == 'None':
+                v = None
+
+            if isinstance(v, str):
+                if v.startswith('[') and v.endswith(']'):
+                    if len(v) > 2:
+                        v = eval(v)
+                    else:
+                        v = []
+
+                elif v.startswith('{') and v.endswith('}'):
+
+                    if len(v) > 2:
+                        v = c.jload(v)
+                    else:
+                        v = {}
+                elif k in fn_schema['input'] and fn_schema['input'][k] == 'str':
+                    if v.startswith("f'") or v.startswith('f"'):
+                        v = c.ljson(v)
+                    else:
+                        v = v
+
+                elif fn_schema['input'][k] == 'float':
+                    v = float(v)
+
+                elif fn_schema['input'][k] == 'int':
+                    v = int(v)
+
+                elif k == 'kwargs':
+                    continue
+                elif v == 'NA':
+                    assert k != 'NA', f'Key {k} not in default'
+                elif v in ['True', 'False']:
+                    v = eval(v)
+                elif c.is_int(v):
+                    v = eval(v)
+                else:
+                    v = v
+
+            kwargs[k] = v
+
+        return kwargs
+
+    @classmethod
+    def memory_info(cls, fmt:str='gb'):
+        return c.module('os').memory_info(fmt=fmt)
+
+    @classmethod
+    def remove_number_from_word(cls, word:str) -> str:
+        # guard against emptying the word entirely
+        while word and word[-1].isdigit():
+            word = word[:-1]
+        return word
+
+    @classmethod
+    def users(cls):
+        users = c.get(cls.resolve_path('users'), {})
+        root_key_address = c.root_key().ss58_address
+        if root_key_address not in users:
+            cls.add_admin(root_key_address)
+        return cls.get('users', {})
+
+    @classmethod
+    def lag(cls, *args, **kwargs):
+        return c.module('subspace').lag(*args, **kwargs)
+
+
@classmethod + def loops(cls, **kwargs): + return c.pm2ls('loop', **kwargs) + + + def loop_fleet(self, n=2, **kwargs): + responses = [] + for i in range(n): + kwargs['remote'] = False + responses += [self.remote_fn('loop', kwargs=kwargs, tag=i)] + return responses + + @classmethod + def remote_fn_fleet(cls, fn:str, n=2, **kwargs): + responses = [] + for i in range(n): + responses += [cls.remote_fn(fn, kwargs=kwargs, tag=i)] + return responses + + + def generate(self, *args, **kwargs): + return 'hey' + + + + + @classmethod + def add_peers(cls, *args, **kwargs): + return c.module('remote').add_peers(*args,**kwargs) + + @classmethod + def sid(cls): + return c.module('subspace.chain')().id() + + @classmethod + def ticket(self, key=None): + key = c.get_key(key) + return key.ticket() + + def save_ticket(self, key=None, **kwargs): + key = c.get_key(key) + return key.save_ticket(**kwargs) + + def load_ticket(self, key=None, **kwargs): + key = c.get_key(key) + return key.load_ticket(**kwargs) + + @classmethod + def verify_ticket(cls, *args, **kwargs): + + return c.get_key().verify_ticket(*args, **kwargs) + + @classmethod + def load_style(cls): + return c.module('streamlit').load_style() + + @classmethod + def active_thread_count(cls): + return threading.active_count() + + @classmethod + def init_args(cls): + return list(cls.config().keys()) + @classmethod - def play(cls): - c.module('music').play() + def soup(cls, *args, **kwargs): + from bs4 import BeautifulSoup + return BeautifulSoup(*args, **kwargs) + ######## + @classmethod - def cleaar_commit_history(cls, cwd=None): - # 1. Navigate to the root of your repository (assuming c.cmd can change directories) - cwd = c.libpath if cwd == None else cwd - c.cmd('cd {cwd}') # Replace with your repository path + def document(cls, fn): + ''' + ## Documentation + + ### `docu` method + + ```python + @classmethod + def docu(cls, fn): + return c.module('agent.coder')().document_fn(fn) + ``` - # 2. Checkout to a temporary branch - c.cmd('git checkout --orphan temp_branch', cwd=cwd) + #### Description: + This class method is responsible for documenting a given function `fn`. - # 3. Add all the files & commit them - c.cmd('git add -A', cwd=cwd) - c.cmd('git commit -am "Initial commit"', cwd=cwd) + #### Parameters: + - `fn`: A function object that needs to be documented. - # # 4. Delete the old branch (assuming it's 'main', if not replace 'main' with your branch name) - # c.cmd('git branch -D main', cwd=cwd) + #### Returns: + - Returns the documentation of the provided function `fn` as generated by the `document_fn` method of the `agent.coder + ''' + return c.module('coder')().document_fn(fn) + + comment = document + + def set_page_config(self,*args, **kwargs): + return c.module('streamlit').set_page_config(*args, **kwargs) - # # 5. 
Rename the temporary branch to 'main' - # c.cmd('git branch -m main', cwd=cwd) + @classmethod + def get_state(cls, network='main', netuid='all', update=True, path='state'): + t1 = c.time() + if not update: + state = cls.get(path, default=None) + if state != None: + return state + + subspace = c.module('subspace')(network=network) + + state = { + 'subnets': subspace.subnet_params(netuid=netuid), + 'modules': subspace.modules(netuid=netuid), + 'balances': subspace.balances(), + 'stake_to': subspace.stake_to(netuid=netuid), + } + + state['total_balance'] = sum(state['balances'].values())/1e9 + state['key2address'] = c.key2address() + state['lag'] = c.lag() + state['block_time'] = 8 + c.print(f'get_state took {c.time() - t1:.2f} seconds') + cls.put(path, state) + return state - # 6. Force push to the remote repository (assuming remote is named 'origin') - c.cmd('git push -f origin main', cwd=cwd) + @classmethod + def eval(cls, module, vali=None, **kwargs): + vali = c.module('vali')() if vali == None else c.module(vali) + return c.eval(module, **kwargs) + + @classmethod + def run_epoch(self, *args, vali=None, **kwargs): + vali = c.module('vali')() if vali == None else c.module(vali) + return vali.run_epoch(*args, **kwargs) + + @classmethod + def comment(self,fn:str='module/ls'): + return c.module('code')().comment(fn) + + @classmethod + def host2ssh(cls, *args, **kwarg): + return c.module('remote').host2ssh(*args, **kwarg) + @classmethod + def imported_modules(self, module:str = None): + return c.module('code').imported_modules(module=module) + + def server2fn(self, *args, **kwargs ): + servers = c.servers(*args, **kwargs) + futures = [] + server2fn = {} + for s in servers: + server2fn[s] = c.submit(f'{s}/schema', kwargs=dict(code=True)) + futures = list(server2fn.values()) + fns = c.wait(futures,timeout=10) + for s, f in zip(servers, fns): + server2fn[s] = f + return server2fn + + def docker_compose_file(self, *args, **kwargs): + x = c.load_yaml(f'{c.libpath}/docker-compose.yml', *args, **kwargs) + port_range = c.port_range() + x['services']["commune"][f'ports'] = [f"{port_range[0]}-{port_range[1]}:{port_range[0]}-{port_range[1]}"] + return x + + @classmethod + def launcher_keys(cls, netuid=0, min_stake=500, **kwargs): + keys = c.keys() + key2balance = c.key2balance(**kwargs) + key2balance = {k: v for k,v in key2balance.items() if v > min_stake} + return [k for k in keys] + + @classmethod + def top_launchers(cls, amount=600, **kwargs): + launcher_keys = cls.launcher_keys() + key2address = c.key2address() + destinations = [] + amounts = [] + launcher2balance = c.get_balances(launcher_keys) + for k in launcher_keys: + k_address = key2address[k] + amount_needed = amount - launcher2balance.get(k_address, 0) + if amount_needed > 0: + destinations.append(k_address) + amounts.append(amount_needed) + else: + c.print(f'{k} has enough balance --> {launcher2balance.get(k, 0)}') + return c.transfer_many(amounts=amounts, destinations=destinations, **kwargs) + + load_launcher_keys = top_launchers + @classmethod + def launcher2balance(cls): + keys = cls.launcher_keys() + return c.get_balances(keys) + Module = c Module.run(__name__) + + diff --git a/commune/module/module.yaml b/commune/module/module.yaml index e25fa1694..5872ee52a 100755 --- a/commune/module/module.yaml +++ b/commune/module/module.yaml @@ -1,18 +1,23 @@ access_module: stake2rate: 100 +debug: false fam: bro -ip: 162.157.13.236 +k: 2 network: subspace shortcuts: bt: bittensor - chain: subspace + chain: subspace.chain d: docker f: frontend 
freegpt: model.freegpt lit: lit_gpt openai: model.openai + openrouter: model.openrouter + or: model.openrouter r: remote s: subspace + sel: selenium store: storage tg: textgen -tag: fams + wombo: w +tag: null diff --git a/commune/module/module_docs.md b/commune/module/module_docs.md deleted file mode 100644 index a95b18b0b..000000000 --- a/commune/module/module_docs.md +++ /dev/null @@ -1,133 +0,0 @@ -# Module Management Tutorial with Commune Library - -In this tutorial, we'll explore how to use the `commune` library for module management in Python. The `commune` library provides functionalities for managing and serving code modules easily. - -## Table of Contents -- [Finding Your Module](#finding-your-module) -- [Module Management](#module-management) -- [Serving](#serving) - ---- - -## Finding Your Module - -You can use the following steps to find and work with modules using the `commune` library. - -### List All Modules -You can list all available modules using the following code: - -```python -import commune as c - -modules_list = c.modules()[:10] -c.print(modules_list) -``` - -### Searching for a Specific Module -To search for a specific module, you can use the `c.modules()` function with a search query: - -```python -search_queries = ['model.llama', 'data', 'demo', 'hf'] -for query in search_queries: - c.print(f'Searching for {query}') - c.print(c.modules(query)) -``` - ---- - -## Module Management - -Once you've found your module, you can manage it using the following steps. - -### Accessing a Module -You can access a module using the `c.module()` function: - -```python -demo = c.module('demo') -c.print('## Code for demo module') -c.print(demo.code()) -``` - -### Viewing Module Config -You can view the configuration of a module using the `config()` method: - -```python -demo.config() -``` - -### Listing Module Functions -To list the functions of a module, use the `fns()` method: - -```python -demo_functions = demo.fns() -c.print(demo_functions) -``` - -### Searching for a Function -To search for a specific function within a module, use the `fns()` method with a search query: - -```python -function_search_query = 'bro' -matching_functions = demo.fns(function_search_query) -c.print(matching_functions) -``` - -### Getting Function Schema -You can retrieve the schema of a specific function using the `schema()` method: - -```python -function_name = 'bro' -function_schema = demo.schema(function_name) -c.print(function_schema) -``` - ---- - -## Serving - -You can serve a module to make its functions accessible via a server. - -### Serving a Module -You can serve a module using the `serve()` method, optionally providing a tag for versioning: - -```python -demo.serve(tag='tag1') -``` - -### Viewing Available Servers -You can view the available servers using the `servers()` method: - -```python -c.print(c.servers()) -``` - -### Viewing Server Logs -To view the logs of a served module, you can use the `logs()` method: - -```python -logs = c.logs('demo::tag1', mode='local') -c.print(logs) -``` - -### Connecting to a Served Module -You can connect to a served module using the `connect()` method: - -```python -demo_client = c.connect('demo::tag1') -demo_client.info() -``` - -### Restarting and Killing a Served Module -You can restart or kill a served module using the `restart()` and `kill()` methods: - -```python -c.restart('demo::tag1') # Restart the module -c.kill('demo::tag1') # Kill the module -``` - ---- - -This concludes our tutorial on module management using the `commune` library. 
You've learned how to find modules, manage their functions, serve them, and interact with served modules. This library can greatly simplify the process of managing and deploying code modules in your projects. -``` - -Feel free to use and adapt this markdown document for your tutorial needs. Make sure to adjust any details as necessary and include code snippets or explanations for each step to ensure clarity and comprehensiveness. \ No newline at end of file diff --git a/commune/module/peers.py b/commune/module/peers.py deleted file mode 100644 index da94ae23e..000000000 --- a/commune/module/peers.py +++ /dev/null @@ -1,149 +0,0 @@ -import commune as c - -class Peers(c.Module): - @classmethod - def add_peer(cls, *args, **kwargs)-> List: - loop = cls.get_event_loop() - peer = loop.run_until_complete(cls.async_add_peer(*args, **kwargs)) - return peer - - @classmethod - def add_peers(cls, *peer_addresses, **kwargs): - if len(peer_addresses) == 0: - peer_addresses = cls.boot_peers() - - if len(peer_addresses) == 1 and isinstance(peer_addresses[0], list): - peer_addresses = peer_addresses[0] - jobs = [] - for peer_address in peer_addresses: - job = cls.async_add_peer(peer_address, **kwargs) - jobs += [job] - - loop = cls.get_event_loop() - peers = loop.run_until_complete(asyncio.gather(*jobs)) - peers = [peer for peer in peers if peer != None] - return {'added_peers': peers, 'msg': f'Added {len(peers)} peers'} - - @classmethod - def peer_registry(cls, peers=None, update: bool = False): - if update: - if peers == None: - peers = cls.peers() - cls.add_peers(peers) - - peer_registry = c.get('peer_registry', {}) - return peer_registry - - @classmethod - async def async_add_peer(cls, - peer_address, - network = 'local', - timeout:int=1, - verbose:bool = True, - add_peer = True): - - peer_registry = await c.async_get_json('peer_registry', default={}, root=True) - - - peer_info = await C.async_call(module=peer_address, - fn='info', - include_namespace=True, - timeout=timeout) - - if add_peer: - await c.async_call(module=peer_address, - fn='add_peer', - args=[cls.root_address], - include_namespace=True, - timeout=timeout) - - - if 'error' in peer_info: - if verbose: - c.print(f'Error adding peer {peer_address} due to {peer_info["error"]}',color='red') - return None - else: - if verbose: - c.print(f'Successfully added peer {peer_address}', color='green') - - - assert isinstance(peer_info, dict) - assert 'address' in peer_info - assert 'namespace' in peer_info - - peer_ip = ':'.join(peer_info['address'].split(':')[:-1]) - peer_port = int(peer_info['address'].split(':')[-1]) - - # relace default local ip with external_ip - peer_info['namespace'] = {k:v.replace(c.default_ip,peer_ip) for k,v in peer_info['namespace'].items()} - - peer_registry[peer_address] = peer_info - - await c.async_put_json('peer_registry', peer_registry, root=True) - - return peer_registry - - @classmethod - def ls_peers(cls, update=False): - peer_registry = cls.get_json('peer_registry', default={}) - return list(peer_registry.keys()) - - @classmethod - def peers(cls, update=False): - peer_registry = cls.peer_registry(update=update) - return list(peer_registry.keys()) - - - @classmethod - def add_peer(cls, *args, **kwargs)-> List: - loop = cls.get_event_loop() - peer = loop.run_until_complete(cls.async_add_peer(*args, **kwargs)) - return peer - - @classmethod - def rm_peers(cls, peer_addresses: list = None): - rm_peers = [] - if peer_addresses == None: - peer_addresses = cls.peers() - if isinstance(peer_addresses, str): - 
peer_addresses = [peer_addresses] - for peer_address in peer_addresses: - - rm_peers.append(cls.rm_peer(peer_address)) - return rm_peers - - - - @classmethod - def rm_peer(cls, peer_address: str): - peer_registry = c.get_json('peer_registry', default={}) - result = peer_registry.pop(peer_address, None) - if result != None: - result = peer_address - cls.put_json('peer_registry', peer_registry, root=True) - return result - - @classmethod - def boot_peers(cls) -> List[str]: - return cls.get('boot_peers', []) - - - @classmethod - def get_peer_info(cls, peer: Union[str, 'Module']) -> Dict[str, Any]: - if isinstance(peer, str): - peer = cls.connect(peer) - - info = peer.info() - return info - - - @classmethod - def get_peer_addresses(cls, ip:str = None ) -> List[str]: - used_local_ports = cls.get_used_ports() - if ip == None: - ip = c.default_ip - peer_addresses = [] - for port in used_local_ports: - peer_addresses.append(f'{ip}:{port}') - - return peer_addresses \ No newline at end of file diff --git a/commune/module/test.py b/commune/module/test.py new file mode 100644 index 000000000..5ebdbf80f --- /dev/null +++ b/commune/module/test.py @@ -0,0 +1,22 @@ + +import os +import commune as c +class Test(c.Module): + def test_file(self, k='test_a', v=1): + c.put(k,v) + assert self.exists(k), f'file does not exist ({k})' + self.encrypt_file(k) + c.print(self.get_text(k)) + self.decrypt_file(k) + new_v = self.get(k) + assert new_v == v, f'new_v {new_v} != v {v}' + self.rm(k) + assert not self.exists(k) + assert not os.path.exists(self.resolve_path(k)) + return {'success': True, 'msg': 'test_file passed'} + + def test_folder_module_detector(self,positives = ['module', 'vali', 'client']): + for p in positives: + assert self.is_folder_module(p) == True, f'{p} is a folder module' + return {'success': True, 'msg': 'All folder modules detected', 'positives': positives} + \ No newline at end of file diff --git a/commune/module/tests/test_json.py b/commune/module/tests/test_json.py deleted file mode 100644 index ad3c62180..000000000 --- a/commune/module/tests/test_json.py +++ /dev/null @@ -1,33 +0,0 @@ -import commune - -def delete_all_files(data = {'bro': 2200}): - self = commune.Module() - self.put_json('bro/fam.json', data=data) - self.put_json('bro/dawg', data=data) - self.rm_json('**') - assert len(self.glob('**')) == 0, self.glob('**') - -def delete_individual_files(data = {'bro': 2200}): - self = commune.Module() - self.put_json('bro/fam.json', data=data) - self.put_json('bro/dawg', data=data) - assert len(self.glob('**')) == 2, self.glob('**') - self.rm_json('bro/fam') - assert len(self.glob('**')) == 1, len(self.glob('**')) - self.rm_json('bro/dawg.json') - assert len(self.glob('**')) == 0 - -def delete_directory(data = {'bro': 2200}): - self = commune.Module() - self.put_json('bro/fam/fam', data=data) - self.put_json('bro/fam/dawg.json', data=data) - assert len(self.glob('bro/**')) == 2 - self.rm_json('bro/fam') - assert len(self.glob('bro/fam/**')) == 0, self.glob('bro/fam/**') - - -if __name__ == '__main__': - delete_all_files() - delete_individual_files() - delete_directory() - \ No newline at end of file diff --git a/commune/module/tests/test_module.py b/commune/module/tests/test_module.py deleted file mode 100644 index 1d2739352..000000000 --- a/commune/module/tests/test_module.py +++ /dev/null @@ -1,59 +0,0 @@ - -import commune -import os - - -class DemoModule: - def __init__(self,x='bro'): - self.x = x - def return_x(self, x = None): - x = x if x else self.x - return x - -def 
test_module_inheritance(x='bro'): - - self = commune.module(DemoModule)(x=x ) - assert self.return_x(x) == x - assert self.module_name() == 'DemoModule' - - self = commune.module(DemoModule(x=x)) - assert self.return_x(x) == x - - - assert self.module_name() == 'DemoModule' - - -def test_serve(x='bro'): - self = commune.module(DemoModule)(x=x ) - self.serve(wait_for_termination=False, verbose=False) - - print(self.server_stats) - client_module = self.connect('DemoModule') - - - client_module.return_x(x) == 'x' - client_module.return_x(x=x) == 'x' - - assert 'DemoModule' in commune.servers() - print(self.kill_server(self.server_stats['port'])) - assert 'DemoModule' not in commune.servers(), commune.servers() - - -def test_load( config_path = './commune/module.yaml'): - - import munch - - for config in [False, config_path, None ]: - module = commune.Module() - assert isinstance(module, commune.Module) - assert hasattr(module, 'config') - assert isinstance(module.config, munch.Munch) - - - - - -if __name__ == '__main__': - test_serve() - test_module_inheritance() - test_load() \ No newline at end of file diff --git a/commune/module/tests/test_ray.py b/commune/module/tests/test_ray.py deleted file mode 100644 index f8d5cb0c7..000000000 --- a/commune/module/tests/test_ray.py +++ /dev/null @@ -1,48 +0,0 @@ -import commune - - -class ModuleF: - def __init__(self,bro='bro'): - self.bro = bro - def bro(self, fam:str): - return 'bro' - -class ModuleG: - def __init__(self,bro='bro'): - self.bro = bro - def bro(self, fam:str): - return self.bro - - -def test_launch(): - # # print(commune.block.ray_actors()) - commune.launch(ModuleF, mode='ray') - assert 'ModuleF' in commune.block.ray_actors() - commune.launch(ModuleG, mode='ray') - assert 'ModuleG' in commune.block.ray_actors() - commune.kill_actor('ModuleG') - assert 'ModuleG' not in commune.block.ray_actors() - commune.kill_actor('ModuleF') - - - -def test_functions(): - # # print(commune.block.ray_actors()) - module = commune.launch(ModuleF, mode='ray') - module.bro == 'fam' - commune.kill_actor('ModuleF') - -def test_gpu_allocation(gpus:int=1, cpus:int=1): - - commune.launch(ModuleF, mode='ray', gpus=gpus, cpus=cpus) - print(commune.actors()) - resources = commune.actor_resources('ModuleF') - assert int(resources['gpus']) == gpus - assert int(resources['cpus']) == cpus - commune.kill_actor('ModuleF') - commune.actor_exists('ModuleF') - -if __name__ == '__main__': - test_gpu_allocation() - test_launch() - test_functions() diff --git a/commune/module/tests/test_server.py b/commune/module/tests/test_server.py deleted file mode 100644 index 445f5ef81..000000000 --- a/commune/module/tests/test_server.py +++ /dev/null @@ -1,32 +0,0 @@ - -import commune -import os - - -class DemoModule: - def __init__(self,x='bro'): - self.x = x - def return_x(self, x = None): - x = x if x else self.x - return x - - -def test_serve(x='bro'): - self = commune.module(DemoModule)(x=x ) - self.serve(wait_for_termination=False, verbose=False) - - print(self.server_stats) - client_module = self.connect('DemoModule') - - - client_module.return_x(x) == 'x' - client_module.return_x(x=x) == 'x' - - assert 'DemoModule' in commune.servers() - print(self.kill_port(self.server_stats['port'])) - assert 'DemoModule' not in commune.servers(), f'{commune.servers()}' - - - -if __name__ == '__main__': - test_serve() \ No newline at end of file diff --git a/commune/module/tree.py b/commune/module/tree.py deleted file mode 100644 index dde9a1756..000000000 --- a/commune/module/tree.py +++ 
/dev/null @@ -1,118 +0,0 @@ -import commune as c -from typing import * -import os - -class Tree(c.Module): - base_module = c.base_module # - - @classmethod - def add_tree(cls, tree, path): - assert not c.isdir(path) - trees = cls.get(tree, {'path': path, 'tree': {}}) - return cls.put('trees', trees ) - - - @classmethod - def build_tree(cls, - update:bool = False, - verbose:bool = False) -> List[str]: - - if update and verbose: - c.print('Building module tree', verbose=verbose) - module_tree = {cls.path2simple(f):f for f in cls.get_module_python_paths()} - if cls.root_module_class in module_tree: - module_tree['module'] = module_tree.pop(cls.root_module_class) - return module_tree - - - module_python_paths = None - @classmethod - def get_module_python_paths(cls) -> List[str]: - ''' - Search for all of the modules with yaml files. Format of the file - ''' - if isinstance(cls.module_python_paths, list): - return cls.module_python_paths - modules = [] - failed_modules = [] - - # find all of the python files - for f in glob(c.root_path + '/**/*.py', recursive=True): - if os.path.isdir(f): - continue - file_path, file_ext = os.path.splitext(f) - - if file_ext == '.py': - dir_path, file_name = os.path.split(file_path) - dir_name = os.path.basename(dir_path) - previous_dir_path = dir_path.split('/')[-2] - - if dir_name.lower() == file_name.lower(): - # if the dirname is equal to the filename then it is a module - modules.append(f) - if file_name.lower().endswith(dir_name.lower()): - # if the dirname is equal to the filename then it is a module - modules.append(f) - if file_name.lower().endswith('module'): - # if the dirname is equal to the filename then it is a module - modules.append(f) - - elif 'module' in file_name.lower(): - modules.append(f) - elif any([os.path.exists(file_path+'.'+ext) for ext in ['yaml', 'yml']]): - modules.append(f) - else: - # FIX ME - f_classes = cls.find_python_class(f, search=['commune.Module', 'c.Module']) - # f_classes = [] - if len(f_classes) > 0: - modules.append(f) - - - cls.module_python_paths = modules - - return modules - - @classmethod - def get_tree_root_dir(cls): - tree_state = cls.get_tree_state()['path'] - - - @classmethod - def path2simple(cls, path:str) -> str: - - # does the config exist - - simple_path = path.split(c.copy(cls.root_dir))[-1] - - if cls.path_config_exists(path): - simple_path = os.path.dirname(simple_path) - - simple_path = simple_path.replace('.py', '') - - - simple_path = simple_path.replace('/', '.')[1:] - - # compress nae - chunks = simple_path.split('.') - new_chunks = [] - for i, chunk in enumerate(chunks): - if len(new_chunks)>0: - if new_chunks[-1] == chunks[i]: - continue - elif any([chunks[i].endswith(s) for s in ['_module', 'module']]): - continue - new_chunks.append(chunk) - simple_path = '.'.join(new_chunks) - - # remove the modules prefix - if simple_path.startswith('modules.'): - simple_path = simple_path.replace('modules.', '') - - # remove any files to compress the name even further for - if len(simple_path.split('.')) > 2: - - if simple_path.split('.')[-1].endswith(simple_path.split('.')[-2]): - simple_path = '.'.join(simple_path.split('.')[:-1]) - return simple_path - diff --git a/commune/module/tutorials/module_basics.ipynb b/commune/module/tutorials/module_basics.ipynb deleted file mode 100644 index 037f3b5f3..000000000 --- a/commune/module/tutorials/module_basics.ipynb +++ /dev/null @@ -1 +0,0 @@ -{"cells":[{"cell_type":"markdown","metadata":{},"source":["# Module 
Management"]},{"cell_type":"code","execution_count":51,"metadata":{},"outputs":[],"source":["import commune as c"]},{"cell_type":"markdown","metadata":{},"source":["## Finding Your Module"]},{"cell_type":"code","execution_count":52,"metadata":{},"outputs":[{"data":{"text/plain":["['demo_c',\n"," 'api',\n"," 'test',\n"," 'module.test',\n"," 'module',\n"," 'websocket',\n"," 'demo',\n"," 'pipeline',\n"," 'crypto.hash',\n"," 'crypto.key.aes']"]},"execution_count":52,"metadata":{},"output_type":"execute_result"}],"source":["# list all of the modules\n","c.modules()[:10]"]},{"cell_type":"code","execution_count":53,"metadata":{},"outputs":[{"data":{"text/html":["
Searching for  model.llama\n","
\n"],"text/plain":["Searching for model.llama\n"]},"metadata":{},"output_type":"display_data"},{"data":{"text/html":["
['model.llama']\n","
\n"],"text/plain":["\u001b[1m[\u001b[0m\u001b[32m'model.llama'\u001b[0m\u001b[1m]\u001b[0m\n"]},"metadata":{},"output_type":"display_data"},{"data":{"text/html":["
Searching for  data\n","
\n"],"text/plain":["Searching for data\n"]},"metadata":{},"output_type":"display_data"},{"data":{"text/html":["
[\n","    'finetune.data',\n","    'bittensor.neurons.text.prompting.miners.self_hosted.coati.dataset',\n","    'data',\n","    'data.image.globe',\n","    'data.hf',\n","    'data.text.squad',\n","    'data.text.folder',\n","    'data.text.pile',\n","    'data.text.code',\n","    'data.text.bittensor',\n","    'data.text.bt.pile',\n","    'data.text.bt.prompt',\n","    'data.text.realfake',\n","    'data.text.truthqa'\n","]\n","
\n"],"text/plain":["\u001b[1m[\u001b[0m\n"," \u001b[32m'finetune.data'\u001b[0m,\n"," \u001b[32m'bittensor.neurons.text.prompting.miners.self_hosted.coati.dataset'\u001b[0m,\n"," \u001b[32m'data'\u001b[0m,\n"," \u001b[32m'data.image.globe'\u001b[0m,\n"," \u001b[32m'data.hf'\u001b[0m,\n"," \u001b[32m'data.text.squad'\u001b[0m,\n"," \u001b[32m'data.text.folder'\u001b[0m,\n"," \u001b[32m'data.text.pile'\u001b[0m,\n"," \u001b[32m'data.text.code'\u001b[0m,\n"," \u001b[32m'data.text.bittensor'\u001b[0m,\n"," \u001b[32m'data.text.bt.pile'\u001b[0m,\n"," \u001b[32m'data.text.bt.prompt'\u001b[0m,\n"," \u001b[32m'data.text.realfake'\u001b[0m,\n"," \u001b[32m'data.text.truthqa'\u001b[0m\n","\u001b[1m]\u001b[0m\n"]},"metadata":{},"output_type":"display_data"},{"data":{"text/html":["
Searching for  demo\n","
\n"],"text/plain":["Searching for demo\n"]},"metadata":{},"output_type":"display_data"},{"data":{"text/html":["
['demo_c', 'demo']\n","
\n"],"text/plain":["\u001b[1m[\u001b[0m\u001b[32m'demo_c'\u001b[0m, \u001b[32m'demo'\u001b[0m\u001b[1m]\u001b[0m\n"]},"metadata":{},"output_type":"display_data"},{"data":{"text/html":["
Searching for  hf\n","
\n"],"text/plain":["Searching for hf\n"]},"metadata":{},"output_type":"display_data"},{"data":{"text/html":["
['hf', 'model.hf', 'data.hf']\n","
\n"],"text/plain":["\u001b[1m[\u001b[0m\u001b[32m'hf'\u001b[0m, \u001b[32m'model.hf'\u001b[0m, \u001b[32m'data.hf'\u001b[0m\u001b[1m]\u001b[0m\n"]},"metadata":{},"output_type":"display_data"}],"source":["# to search for a specific module, use the search function\n","for k in ['model.llama', 'data', 'demo', 'hf']:\n"," c.print('Searching for ', k)\n"," c.print( c.modules(k))"]},{"cell_type":"markdown","metadata":{},"source":["## Module Management"]},{"cell_type":"code","execution_count":54,"metadata":{},"outputs":[{"data":{"text/html":["
## Code for demo module\n","
\n"],"text/plain":["## Code for demo module\n"]},"metadata":{},"output_type":"display_data"},{"data":{"text/html":["
import commune as c\n","\n","class Demo(c.Module):\n","    def __init__(self, config = None, **kwargs):\n","        self.set_config(config, kwargs=kwargs)\n","    def bro(self, x='fam'):\n","        return f'whadup {x}'\n","    \n","    def hey(self, x='fam'):\n","        return f'whadup {x}'\n","    @classmethod\n","    def test(cls, **kwargs):\n","        self = cls(**kwargs)\n","        print('Testing demo')\n","        c.print(self.config)\n","        print(self.bro())\n","        print(self.hey())\n","        assert self.bro() == 'whadup fam'\n","        assert self.hey() == 'whadup fam'\n","        return True\n","    \n","
\n"],"text/plain":["import commune as c\n","\n","class \u001b[1;35mDemo\u001b[0m\u001b[1m(\u001b[0mc.Module\u001b[1m)\u001b[0m:\n"," def \u001b[1;35m__init__\u001b[0m\u001b[1m(\u001b[0mself, config = \u001b[3;35mNone\u001b[0m, **kwargs\u001b[1m)\u001b[0m:\n"," \u001b[1;35mself.set_config\u001b[0m\u001b[1m(\u001b[0mconfig, \u001b[33mkwargs\u001b[0m=\u001b[35mkwargs\u001b[0m\u001b[1m)\u001b[0m\n"," def \u001b[1;35mbro\u001b[0m\u001b[1m(\u001b[0mself, \u001b[33mx\u001b[0m=\u001b[32m'fam'\u001b[0m\u001b[1m)\u001b[0m:\n"," return f'whadup \u001b[1m{\u001b[0mx\u001b[1m}\u001b[0m'\n"," \n"," def \u001b[1;35mhey\u001b[0m\u001b[1m(\u001b[0mself, \u001b[33mx\u001b[0m=\u001b[32m'fam'\u001b[0m\u001b[1m)\u001b[0m:\n"," return f'whadup \u001b[1m{\u001b[0mx\u001b[1m}\u001b[0m'\n"," @classmethod\n"," def \u001b[1;35mtest\u001b[0m\u001b[1m(\u001b[0mcls, **kwargs\u001b[1m)\u001b[0m:\n"," self = \u001b[1;35mcls\u001b[0m\u001b[1m(\u001b[0m**kwargs\u001b[1m)\u001b[0m\n"," \u001b[1;35mprint\u001b[0m\u001b[1m(\u001b[0m\u001b[32m'Testing demo'\u001b[0m\u001b[1m)\u001b[0m\n"," \u001b[1;35mc.print\u001b[0m\u001b[1m(\u001b[0mself.config\u001b[1m)\u001b[0m\n"," \u001b[1;35mprint\u001b[0m\u001b[1m(\u001b[0m\u001b[1;35mself.bro\u001b[0m\u001b[1m(\u001b[0m\u001b[1m)\u001b[0m\u001b[1m)\u001b[0m\n"," \u001b[1;35mprint\u001b[0m\u001b[1m(\u001b[0m\u001b[1;35mself.hey\u001b[0m\u001b[1m(\u001b[0m\u001b[1m)\u001b[0m\u001b[1m)\u001b[0m\n"," assert \u001b[1;35mself.bro\u001b[0m\u001b[1m(\u001b[0m\u001b[1m)\u001b[0m == \u001b[32m'whadup fam'\u001b[0m\n"," assert \u001b[1;35mself.hey\u001b[0m\u001b[1m(\u001b[0m\u001b[1m)\u001b[0m == \u001b[32m'whadup fam'\u001b[0m\n"," return \u001b[3;92mTrue\u001b[0m\n"," \n"]},"metadata":{},"output_type":"display_data"}],"source":["demo = c.module('demo')\n","\n","c.print('## Code for demo module')\n","c.print(demo.code())"]},{"cell_type":"code","execution_count":55,"metadata":{},"outputs":[{"data":{"text/plain":["Munch({'api': 1234, 'whadup': 'hey'})"]},"execution_count":55,"metadata":{},"output_type":"execute_result"}],"source":["# see the config\n","demo.config()"]},{"cell_type":"code","execution_count":56,"metadata":{},"outputs":[{"data":{"text/plain":["['__init__', 'hey', 'bro', 'test']"]},"execution_count":56,"metadata":{},"output_type":"execute_result"}],"source":["# list the functions\n","\n","demo.fns()"]},{"cell_type":"code","execution_count":57,"metadata":{},"outputs":[{"data":{"text/plain":["['bro']"]},"execution_count":57,"metadata":{},"output_type":"execute_result"}],"source":["# search for a function\n","demo.fns('bro')"]},{"cell_type":"code","execution_count":58,"metadata":{},"outputs":[{"data":{"text/plain":["{'bro': {'input': {'x': 'NA'}, 'output': {}, 'type': 'self'}}"]},"execution_count":58,"metadata":{},"output_type":"execute_result"}],"source":["# get the function schema\n","demo.schema('bro')"]},{"cell_type":"markdown","metadata":{},"source":["## Serving"]},{"cell_type":"code","execution_count":66,"metadata":{},"outputs":[{"data":{"text/html":["
Launching class:Demo name:demo::tag1 fn:serve mode:pm2\n","
\n"],"text/plain":["\u001b[1;36mLaunching\u001b[0m\u001b[32m \u001b[0m\u001b[1;33mclass:Demo\u001b[0m\u001b[32m \u001b[0m\u001b[1;37mnam\u001b[0m\u001b[1;37me\u001b[0m\u001b[1;92m:de\u001b[0m\u001b[32mmo::tag1 \u001b[0m\u001b[1;37mfn\u001b[0m\u001b[32m:serve \u001b[0m\u001b[1;37mmode\u001b[0m\u001b[32m:pm2\u001b[0m\n"]},"metadata":{},"output_type":"display_data"},{"data":{"text/plain":["'demo::tag1'"]},"execution_count":66,"metadata":{},"output_type":"execute_result"}],"source":["# serve the module\n","demo.serve(tag='tag1') # tag is optional, but useful for versioning\n"]},{"cell_type":"code","execution_count":67,"metadata":{},"outputs":[{"data":{"text/plain":["['module', 'vali', 'demo', 'demo::tag1']"]},"execution_count":67,"metadata":{},"output_type":"execute_result"}],"source":["c.servers()"]},{"cell_type":"code","execution_count":68,"metadata":{},"outputs":[{"data":{"text/html":["
\u001b[33mAdding auth module:  base\u001b[0m\n","\u001b[1;35mMunch\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m{\u001b[0m\u001b[32m'max_staleness'\u001b[0m\u001b[33m: \u001b[0m\u001b[1;36m100\u001b[0m\u001b[33m, \u001b[0m\u001b[32m'role2rate'\u001b[0m\u001b[33m:\n","\u001b[0m\u001b[1;35mMunch\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m{\u001b[0m\u001b[32m'admin'\u001b[0m\u001b[33m: \u001b[0m\u001b[1;36m-1\u001b[0m\u001b[33m, \u001b[0m\u001b[32m'user'\u001b[0m\u001b[33m:\n","\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m)\u001b[0m\n","\u001b🚀 Serving demo::tag1 on \u001b[1;92m99.230.89.152\u001b[0m:\u001b[1;36m50082\u001b[0m 🚀\u001b\n","\u001b🚀 Registered demo::tag1 on \u001b[1;92m99.230.89.152\u001b[0m:\u001b[1;36m50082\u001b[0m 🚀\u001b\n","INFO:     127.0.0.1:64341 - \"POST /info/ HTTP/1.1\" 307 Temporary Redirect\n","INFO:     127.0.0.1:64341 - \"POST /info HTTP/1.1\" 500 Internal Server Error\n","\u001b[33mStopping existing server demo::tag1\u001b[0m\n","\u001b[33mAdding auth module:  base\u001b[0m\n","\u001b[1;35mMunch\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m{\u001b[0m\u001b[32m'max_staleness'\u001b[0m\u001b[33m: \u001b[0m\u001b[1;36m100\u001b[0m\u001b[33m, \u001b[0m\u001b[32m'role2rate'\u001b[0m\u001b[33m:\n","\u001b[0m\u001b[1;35mMunch\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m{\u001b[0m\u001b[32m'admin'\u001b[0m\u001b[33m: \u001b[0m\u001b[1;36m-1\u001b[0m\u001b[33m, \u001b[0m\u001b[32m'user'\u001b[0m\u001b[33m:\n","\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m}\u001b[0m\u001b[1;33m)\u001b[0m\n","\u001b🚀 Serving demo::tag1 on \u001b[1;92m0.0.0.0\u001b[0m:\u001b[1;36m50083\u001b[0m 🚀\u001b\n","\u001b🚀 Registered demo::tag1 on \u001b[1;92m0.0.0.0\u001b[0m:\u001b[1;36m50083\u001b[0m 🚀\u001bINFO:     Started server process [39545]\n","INFO:     Waiting for application startup.\n","INFO:     Application startup complete.\n","INFO:     Uvicorn running on http://0.0.0.0:50082 (Press CTRL+C to quit)\n","ERROR:    Exception in ASGI application\n","Traceback (most recent call last):\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/uvicorn/protocols/http/h11_impl.py\", line 428,\n","in run_asgi\n","    result = await app(  # type: ignore\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/uvicorn/middleware/proxy_headers.py\", line 78,\n","in __call__\n","    return await self.app(scope, receive, send)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/applications.py\", line 289, in \n","__call__\n","    await super().__call__(scope, receive, send)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/applications.py\", line 122, in \n","__call__\n","    await self.middleware_stack(scope, receive, send)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/errors.py\", line 184, in \n","__call__\n","    raise exc\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/errors.py\", line 162, in \n","__call__\n","    await self.app(scope, receive, _send)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/cors.py\", line 83, in \n","__call__\n","    await self.app(scope, receive, send)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/exceptions.py\", line 79, \n","in __call__\n","    raise exc\n","  File 
\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/exceptions.py\", line 68, \n","in __call__\n","    await self.app(scope, receive, sender)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/middleware/asyncexitstack.py\", line \n","20, in __call__\n","    raise e\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/middleware/asyncexitstack.py\", line \n","17, in __call__\n","    await self.app(scope, receive, send)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/routing.py\", line 718, in __call__\n","    await route.handle(scope, receive, send)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/routing.py\", line 276, in handle\n","    await self.app(scope, receive, send)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/routing.py\", line 66, in app\n","    response = await func(request)\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/routing.py\", line 273, in app\n","    raw_response = await run_endpoint_function(\n","  File \"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/routing.py\", line 190, in \n","run_endpoint_function\n","    return await dependant.call(**values)\n","  File \"/Users/salvivona/commune/commune/modules/server/http/server_http.py\", line 93, in forward_api\n","    result = self.process_result(result)\n","  File \"/Users/salvivona/commune/commune/modules/server/http/server_http.py\", line 145, in process_result\n","    from sse_starlette.sse import EventSourceResponse\n","ModuleNotFoundError: No module named 'sse_starlette'\n","INFO:     Started server process [39596]\n","INFO:     Waiting for application startup.\n","INFO:     Application startup complete.\n","INFO:     Uvicorn running on http://0.0.0.0:50083 (Press CTRL+C to quit)\n","
\n"],"text/plain":["\u001b\u001b[1m[\u001b[0m33mAdding auth module: base\u001b\u001b[1m[\u001b[0m0m\n","\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;35mMunch\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;\u001b[1;35m33m\u001b[0m\u001b[1m(\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m{\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m32m'max_staleness'\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m: \u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m100\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m, \u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m32m'role2rate'\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m:\n","\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;35mMunch\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;\u001b[1;35m33m\u001b[0m\u001b[1m(\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m{\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m32m'admin'\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m: \u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m-\u001b[1;36m1\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m, \u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m32m'user'\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m:\n","\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m1\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m}\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m)\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m}\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m)\u001b[0m\u001b\u001b[1m[\u001b[0m0m\n","\u001b🚀 Serving demo::tag1 on \u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;92m\u001b[1;92m99.230.89.152\u001b[0m\u001b\u001b[1m[\u001b[0m0m:\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m50082\u001b\u001b[1m[\u001b[0m0m 🚀\u001b\n","\u001b🚀 Registered demo::tag1 on \u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;92m\u001b[1;92m99.230.89.152\u001b[0m\u001b\u001b[1m[\u001b[0m0m:\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m50082\u001b\u001b[1m[\u001b[0m0m 🚀\u001b\n","INFO: \u001b[1;92m127.0.0.1\u001b[0m:\u001b[1;36m64341\u001b[0m - \u001b[32m\"POST /info/ HTTP/1.1\"\u001b[0m \u001b[1;36m307\u001b[0m Temporary Redirect\n","INFO: \u001b[1;92m127.0.0.1\u001b[0m:\u001b[1;36m64341\u001b[0m - \u001b[32m\"POST /info HTTP/1.1\"\u001b[0m \u001b[1;36m500\u001b[0m Internal Server Error\n","\u001b\u001b[1m[\u001b[0m33mStopping existing server demo::tag1\u001b\u001b[1m[\u001b[0m0m\n","\u001b\u001b[1m[\u001b[0m33mAdding auth module: base\u001b\u001b[1m[\u001b[0m0m\n","\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;35mMunch\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;\u001b[1;35m33m\u001b[0m\u001b[1m(\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m{\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m32m'max_staleness'\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m: \u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m100\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m, 
\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m32m'role2rate'\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m:\n","\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;35mMunch\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;\u001b[1;35m33m\u001b[0m\u001b[1m(\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m{\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m32m'admin'\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m: \u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m-\u001b[1;36m1\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m, \u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m32m'user'\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m33m:\n","\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m1\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m}\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m)\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m}\u001b[0m\u001b\u001b[1m[\u001b[0m0m\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;33m\u001b[1m)\u001b[0m\u001b\u001b[1m[\u001b[0m0m\n","\u001b🚀 Serving demo::tag1 on \u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;92m\u001b[1;92m0.0.0.0\u001b[0m\u001b\u001b[1m[\u001b[0m0m:\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m50083\u001b\u001b[1m[\u001b[0m0m 🚀\u001b\n","\u001b🚀 Registered demo::tag1 on \u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;92m\u001b[1;92m0.0.0.0\u001b[0m\u001b\u001b[1m[\u001b[0m0m:\u001b\u001b[1m[\u001b[0m\u001b[1;36m1\u001b[0m;36m50083\u001b\u001b[1m[\u001b[0m0m 🚀\u001bINFO: Started server process \u001b[1m[\u001b[0m\u001b[1;36m39545\u001b[0m\u001b[1m]\u001b[0m\n","INFO: Waiting for application startup.\n","INFO: Application startup complete.\n","INFO: Uvicorn running on \u001b[4;94mhttp://0.0.0.0:50082\u001b[0m \u001b[1m(\u001b[0mPress CTRL+C to quit\u001b[1m)\u001b[0m\n","ERROR: Exception in ASGI application\n","Traceback \u001b[1m(\u001b[0mmost recent call last\u001b[1m)\u001b[0m:\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/uvicorn/protocols/http/h11_impl.py\"\u001b[0m, line \u001b[1;36m428\u001b[0m,\n","in run_asgi\n"," result = await \u001b[1;35mapp\u001b[0m\u001b[1m(\u001b[0m # type: ignore\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/uvicorn/middleware/proxy_headers.py\"\u001b[0m, line \u001b[1;36m78\u001b[0m,\n","in __call__\n"," return await \u001b[1;35mself.app\u001b[0m\u001b[1m(\u001b[0mscope, receive, send\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/applications.py\"\u001b[0m, line \u001b[1;36m289\u001b[0m, in \n","__call__\n"," await \u001b[1;35msuper\u001b[0m\u001b[1m(\u001b[0m\u001b[1m)\u001b[0m\u001b[1;35m.__call__\u001b[0m\u001b[1m(\u001b[0mscope, receive, send\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/applications.py\"\u001b[0m, line \u001b[1;36m122\u001b[0m, in \n","__call__\n"," await \u001b[1;35mself.middleware_stack\u001b[0m\u001b[1m(\u001b[0mscope, receive, send\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/errors.py\"\u001b[0m, line \u001b[1;36m184\u001b[0m, in \n","__call__\n"," raise exc\n"," 
File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/errors.py\"\u001b[0m, line \u001b[1;36m162\u001b[0m, in \n","__call__\n"," await \u001b[1;35mself.app\u001b[0m\u001b[1m(\u001b[0mscope, receive, _send\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/cors.py\"\u001b[0m, line \u001b[1;36m83\u001b[0m, in \n","__call__\n"," await \u001b[1;35mself.app\u001b[0m\u001b[1m(\u001b[0mscope, receive, send\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/exceptions.py\"\u001b[0m, line \u001b[1;36m79\u001b[0m, \n","in __call__\n"," raise exc\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/middleware/exceptions.py\"\u001b[0m, line \u001b[1;36m68\u001b[0m, \n","in __call__\n"," await \u001b[1;35mself.app\u001b[0m\u001b[1m(\u001b[0mscope, receive, sender\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/middleware/asyncexitstack.py\"\u001b[0m, line \n","\u001b[1;36m20\u001b[0m, in __call__\n"," raise e\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/middleware/asyncexitstack.py\"\u001b[0m, line \n","\u001b[1;36m17\u001b[0m, in __call__\n"," await \u001b[1;35mself.app\u001b[0m\u001b[1m(\u001b[0mscope, receive, send\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/routing.py\"\u001b[0m, line \u001b[1;36m718\u001b[0m, in __call__\n"," await \u001b[1;35mroute.handle\u001b[0m\u001b[1m(\u001b[0mscope, receive, send\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/routing.py\"\u001b[0m, line \u001b[1;36m276\u001b[0m, in handle\n"," await \u001b[1;35mself.app\u001b[0m\u001b[1m(\u001b[0mscope, receive, send\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/starlette/routing.py\"\u001b[0m, line \u001b[1;36m66\u001b[0m, in app\n"," response = await \u001b[1;35mfunc\u001b[0m\u001b[1m(\u001b[0mrequest\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/routing.py\"\u001b[0m, line \u001b[1;36m273\u001b[0m, in app\n"," raw_response = await \u001b[1;35mrun_endpoint_function\u001b[0m\u001b[1m(\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/Library/Python/3.9/lib/python/site-packages/fastapi/routing.py\"\u001b[0m, line \u001b[1;36m190\u001b[0m, in \n","run_endpoint_function\n"," return await \u001b[1;35mdependant.call\u001b[0m\u001b[1m(\u001b[0m**values\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/commune/commune/modules/server/http/server_http.py\"\u001b[0m, line \u001b[1;36m93\u001b[0m, in forward_api\n"," result = \u001b[1;35mself.process_result\u001b[0m\u001b[1m(\u001b[0mresult\u001b[1m)\u001b[0m\n"," File \u001b[32m\"/Users/salvivona/commune/commune/modules/server/http/server_http.py\"\u001b[0m, line \u001b[1;36m145\u001b[0m, in process_result\n"," from sse_starlette.sse import EventSourceResponse\n","ModuleNotFoundError: No module named \u001b[32m'sse_starlette'\u001b[0m\n","INFO: Started server process \u001b[1m[\u001b[0m\u001b[1;36m39596\u001b[0m\u001b[1m]\u001b[0m\n","INFO: Waiting for application startup.\n","INFO: Application startup complete.\n","INFO: Uvicorn running on \u001b[4;94mhttp://0.0.0.0:50083\u001b[0m 
\u001b[1m(\u001b[0mPress CTRL+C to quit\u001b[1m)\u001b[0m\n"]},"metadata":{},"output_type":"display_data"}],"source":["# get the logs of your served module\n","c.print(c.logs('demo::tag1', mode='local'))"]},{"cell_type":"code","execution_count":75,"metadata":{},"outputs":[{"data":{"text/plain":["{'address': '99.230.89.152:50093',\n"," 'functions': ['bro', 'test', 'hey', '__init__'],\n"," 'attributes': ['config',\n"," 'kwargs',\n"," '_key',\n"," 'ip',\n"," 'port',\n"," 'address',\n"," '_access_module'],\n"," 'name': 'demo::tag1',\n"," 'path': 'demo',\n"," 'chash': '24a95de97f40865f2845b3dccfcaa9e13794e72af8df23586394476d4352b309',\n"," 'hash': 'b34685a63805a446f3d797a3a99c3eefcc842862844625a687b25a7e56fec4a1',\n"," 'signature': '521f3e72e1812912456bd383dfdd117154f1cf2541f978cdcc5e2a973af05b1c6d8bf1a86040e0c9aa1884e4037beb369cd847d7bec1eb55ecf9767145429c80',\n"," 'ss58_address': '5DFzVmQH8LbxAa5ZSnz45msUwYm41tnhhZjXojPctAKJ6RLN'}"]},"execution_count":75,"metadata":{},"output_type":"execute_result"}],"source":["demo_client = c.connect('demo::tag1')\n","demo_client.info()"]},{"cell_type":"code","execution_count":null,"metadata":{},"outputs":[],"source":["c.restart('demo::tag1') # restart the module\n","c.kill('demo::tag1') # kill the module"]}],"metadata":{"kernelspec":{"display_name":"Python 3","language":"python","name":"python3"},"language_info":{"codemirror_mode":{"name":"ipython","version":3},"file_extension":".py","mimetype":"text/x-python","name":"python","nbconvert_exporter":"python","pygments_lexer":"ipython3","version":"3.9.6"},"orig_nbformat":4},"nbformat":4,"nbformat_minor":2} diff --git a/commune/module/tutorials/module_config_management.ipynb b/commune/module/tutorials/module_config_management.ipynb deleted file mode 100644 index 040d99669..000000000 --- a/commune/module/tutorials/module_config_management.ipynb +++ /dev/null @@ -1,151 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "import commune as c\n", - "c.enable_jupyter()" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "module = c.module()" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
{\n",
-       "    'boot_peers': ['162.157.13.236:9057', '162.157.13.236:9402'],\n",
-       "    'ip': '162.157.13.236',\n",
-       "    'module_overrides': ['network', 'key'],\n",
-       "    'network': 'subspace',\n",
-       "    'network2url': {},\n",
-       "    'networks': ['subspace', 'local', 'global'],\n",
-       "    'shortcuts': {\n",
-       "        'bt': 'bittensor',\n",
-       "        'chain': 'subspace',\n",
-       "        'd': 'docker',\n",
-       "        'freegpt': 'model.freegpt',\n",
-       "        'hf': 'huggingface',\n",
-       "        'lit': 'lit_gpt',\n",
-       "        'openai': 'model.openai',\n",
-       "        's': 'subspace',\n",
-       "        'tg': 'textgen'\n",
-       "    }\n",
-       "}\n",
-       "
\n" - ], - "text/plain": [ - "\u001b[1m{\u001b[0m\n", - " \u001b[32m'boot_peers'\u001b[0m: \u001b[1m[\u001b[0m\u001b[32m'162.157.13.236:9057'\u001b[0m, \u001b[32m'162.157.13.236:9402'\u001b[0m\u001b[1m]\u001b[0m,\n", - " \u001b[32m'ip'\u001b[0m: \u001b[32m'162.157.13.236'\u001b[0m,\n", - " \u001b[32m'module_overrides'\u001b[0m: \u001b[1m[\u001b[0m\u001b[32m'network'\u001b[0m, \u001b[32m'key'\u001b[0m\u001b[1m]\u001b[0m,\n", - " \u001b[32m'network'\u001b[0m: \u001b[32m'subspace'\u001b[0m,\n", - " \u001b[32m'network2url'\u001b[0m: \u001b[1m{\u001b[0m\u001b[1m}\u001b[0m,\n", - " \u001b[32m'networks'\u001b[0m: \u001b[1m[\u001b[0m\u001b[32m'subspace'\u001b[0m, \u001b[32m'local'\u001b[0m, \u001b[32m'global'\u001b[0m\u001b[1m]\u001b[0m,\n", - " \u001b[32m'shortcuts'\u001b[0m: \u001b[1m{\u001b[0m\n", - " \u001b[32m'bt'\u001b[0m: \u001b[32m'bittensor'\u001b[0m,\n", - " \u001b[32m'chain'\u001b[0m: \u001b[32m'subspace'\u001b[0m,\n", - " \u001b[32m'd'\u001b[0m: \u001b[32m'docker'\u001b[0m,\n", - " \u001b[32m'freegpt'\u001b[0m: \u001b[32m'model.freegpt'\u001b[0m,\n", - " \u001b[32m'hf'\u001b[0m: \u001b[32m'huggingface'\u001b[0m,\n", - " \u001b[32m'lit'\u001b[0m: \u001b[32m'lit_gpt'\u001b[0m,\n", - " \u001b[32m'openai'\u001b[0m: \u001b[32m'model.openai'\u001b[0m,\n", - " \u001b[32m's'\u001b[0m: \u001b[32m'subspace'\u001b[0m,\n", - " \u001b[32m'tg'\u001b[0m: \u001b[32m'textgen'\u001b[0m\n", - " \u001b[1m}\u001b[0m\n", - "\u001b[1m}\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# default config\n", - "config = module.config(to_munch=False)\n", - "c.print(config)" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
Value of sup 1\n",
-       "
\n" - ], - "text/plain": [ - "Value of sup \u001b[1;36m1\u001b[0m\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# Put and Get and Delete Configuration Keys\n", - "c.print()\n", - "module.putc('sup', 1)\n", - "c.print('Value of sup',module.config()['sup'])\n", - "module.rmc('sup')\n", - "assert 'sup' not in module.config()\n", - "c.print('Value of sup',module.config()['sup'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "module.get_config('sup', default=1)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.12" - }, - "orig_nbformat": 4 - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/commune/module/watchdog/watchdog.py b/commune/module/watchdog/watchdog.py deleted file mode 100644 index 83fb0077d..000000000 --- a/commune/module/watchdog/watchdog.py +++ /dev/null @@ -1,54 +0,0 @@ -import commune as c -import time -from watchdog.observers import Observer -from watchdog.events import FileSystemEventHandler - -class FileChangeHandler(FileSystemEventHandler): - def __init__(self, module): - super().__init__() - self.module = module - - - def on_any_event(self, event): - if event.is_directory: - return - if event.event_type in ['created', 'modified', 'deleted']: - c.print(f'File change detected: {event.src_path}') - c.module_tree(update=True, verbose=True) - -class WatchdogModule(c.Module, FileSystemEventHandler): - - - - def __init__(self, folder_path:str = c.root_path, run:bool = True ): - super().__init__() - self.folder_path = folder_path - self.observer = None - if run: - self.start_server() - def start_server(self): - event_handler = FileChangeHandler(self) - self.observer = Observer() - self.observer.schedule(event_handler, self.folder_path, recursive=True) - self.observer.start() - - try: - lifetime = 0 - sleep_period = 5 - while True: - c.print(f'Watching for file changes. 
{lifetime} seconds elapsed.') - - time.sleep(sleep_period) - lifetime += sleep_period - except KeyboardInterrupt: - self.observer.stop() - self.observer.join() - - def stop_server(self): - if self.observer is not None: - self.observer.stop() - self.observer.join() - self.observer = None - - def log_file_change(self, message): - c.print(message) diff --git a/commune/module/watchdog/watchdog.yaml b/commune/module/watchdog/watchdog.yaml deleted file mode 100644 index 7ac5c5e33..000000000 --- a/commune/module/watchdog/watchdog.yaml +++ /dev/null @@ -1,6 +0,0 @@ -return_val: pong -users: -- user1 -- user2 -- user3 -- bro diff --git a/commune/module/wrap/wrap.py b/commune/module/wrap/wrap.py deleted file mode 100644 index b682ad9c9..000000000 --- a/commune/module/wrap/wrap.py +++ /dev/null @@ -1,36 +0,0 @@ - -import commune as c -from typing import * - -class ModuleWrapper(c.Module): - protected_attributes = [ 'info', 'serve', 'module_file', 'module_path', 'server_name', 'test'] - - def __init__(self, - module:'Any' = None - ): - self.module = module - - @classmethod - def module_file(cls): - return cls.get_module_path(simple=False) - - def module_path(cls) -> str: - return cls.__name__.lower() - - - def schema(self, **kwargs) -> Dict[str, Any]: - return c.get_schema(obj=self.module, **kwargs) - - - def functions(self, ) -> Dict[str, Any]: - return c.get_functions(obj=self.module) - - - - def __getattr__(self, key): - - if key in self.protected_attributes : - return getattr(self, key) - else: - return lambda *args, **kwargs : getattr(self.module, (key))( *args, **kwargs) - diff --git a/commune/modules/access/access.py b/commune/modules/access/access.py deleted file mode 100644 index 65c1a87d3..000000000 --- a/commune/modules/access/access.py +++ /dev/null @@ -1,106 +0,0 @@ -import commune as c -from typing import * - - - - - -class Access(c.Module): - sync_time = 0 - timescale_map = {'sec': 1, 'min': 60, 'hour': 3600, 'day': 86400} - - def __init__(self, - module : Any, # the module or any python object - network: str = 'main', # mainnet - netuid: int = 0, # subnet id - sync_interval: int = 1000, # 1000 seconds per sync with the network - timescale:str = 'min', # 'sec', 'min', 'hour', 'day' - stake2rate: int = 100, # 1 call per every N tokens staked per timescale - rate: int = 1, # 1 call per timescale - base_rate: int = 0,# base level of calls per timescale (free calls) per account - fn2rate: dict = {}, # function name to rate map, this overrides the default rate - **kwargs): - config = self.set_config(kwargs=locals()) - c.print(config) - self.module = module - self.user_info = {} - - def sync(self): - sync_time = c.time() - self.sync_time - # if the sync time is greater than the sync interval, we need to sync - try: - if sync_time > self.config.sync_interval : - self.subspace = c.module('subspace')(network=self.config.network, netuid=self.config.netuid) - self.stakes = self.subspace.stakes(fmt='j') - self.sync_time = c.time() - except Exception as e: - c.print(f"Error syncing {e}") - self.subspace = None - self.stakes = {} - return - - - def verify(self, input:dict) -> dict: - - address = input['address'] - if c.is_admin(address): - return input - else: - self.sync() - if self.subspace == None: - raise Exception(f"Subspace not initialized and you are not an authorized admin {input['address']}, authorized admins: {c.admins()}") - # if not an admin address, we need to check the whitelist and blacklist - fn = input.get('fn') - assert fn in self.module.whitelist or fn in c.helper_whitelist, 
f"Function {fn} not in whitelist" - assert fn not in self.module.blacklist, f"Function {fn} is blacklisted" - - # RATE LIMIT CHECKING HERE - stake = self.stakes.get(address, 0) - # get the rate limit for the function - if fn in self.config.fn2rate: - rate = self.config.fn2rate[fn] - else: - rate = self.config.rate - rate_limit = (stake / self.config.stake2rate) - rate_limit = rate_limit + self.config.base_rate - - # convert the rate limit to the correct timescale - rate_limit = rate_limit / self.timescale_map[self.config.timescale] - - default_user_info = { - 'requests': 0, - 'last_time_called': 0, - 'rate': 0, - 'stake': stake - } - - - user_info = self.user_info.get(address, default_user_info) - user_rate = 1 / (c.time() - user_info['last_time_called'] + 1e-10) - assert user_rate < rate_limit, f"Rate limit too high (calls per second) {user_rate} > {rate_limit}" - # update the user info - user_info['last_time_called'] = c.time() - user_info['requests'] += 1 - user_info['rate'] = user_rate - user_info['rate_limit'] = rate_limit - - self.user_info[address] = user_info - - return input - - - @classmethod - def test(cls): - server_name = 'access_subspace.demo' - module = c.serve('module', server_name=server_name, wait_for_server=True) - - for key in [None, 'fam']: - client = c.connect(server_name, key=key) - for n in range(10): - c.print(client.info(timeout=4)) - c.kill(server_name) - return {'name': server_name, 'module': module, 'client': client} - - - - diff --git a/commune/modules/access/base/access_base.py b/commune/modules/access/base/access_base.py deleted file mode 100644 index 8b2f52f35..000000000 --- a/commune/modules/access/base/access_base.py +++ /dev/null @@ -1,40 +0,0 @@ -import commune as c - -class AccessBase(c.Module): - def __init__(self, module, **kwargs): - - config = self.set_config(kwargs) - self.module = module - self.max_staleness = config.max_staleness - self.requests = {} - - def verify(self, input:dict) -> dict: - - # here we want to verify the data is signed with the correct key - request_timestamp = input['data'].get('timestamp', 0) - request_staleness = c.timestamp() - request_timestamp - assert request_staleness < self.max_staleness, f"Request is too old, {request_staleness} > MAX_STALENESS ({self.max_request_staleness}) seconds old" - address = input.get('address', None) - fn = input.get('fn', None) - - role = c.get_role(address) - if bool(role == 'admin'): - # this is an admin address, so we can pass - pass - else: - # if not an admin address, we need to check the whitelist and blacklist - assert fn in self.module.whitelist , f"Function {fn} not in whitelist" - assert fn not in self.module.blacklist, f"Function {fn} is blacklisted" - - # RATE LIMIT CHECKING HERE - num_requests = self.requests.get(address, 0) + 1 - rate_limit = self.config.role2rate.get(role, 0) - if rate_limit >= 0: - assert self.requests[address] < self.module.rate_limit, f"Rate limit exceeded for {address}" - self.requests[address] = num_requests - - - return input - - - diff --git a/commune/modules/access/base/access_base.yaml b/commune/modules/access/base/access_base.yaml deleted file mode 100644 index e57d733b6..000000000 --- a/commune/modules/access/base/access_base.yaml +++ /dev/null @@ -1,5 +0,0 @@ -max_staleness: 100 -# rate limit for each role -role2rate: - admin: -1 # no limit - user: 1 # 1 request per second \ No newline at end of file diff --git a/commune/modules/access/subspace/access_subspace.md b/commune/modules/access/subspace/access_subspace.md deleted file mode 100644 index 
a1ba8155a..000000000 --- a/commune/modules/access/subspace/access_subspace.md +++ /dev/null @@ -1,29 +0,0 @@ - -# Access Module - -**What does this do?** - -This allows servers to control the calls per minute and have them depend on the caller's stake. In this case callers can call a module once per minute per 100 tokens. This prevents DoSing (denial of service) from outside miners. - -``` -access_module: - network: main # mainnet - netuid: 0 # subnet id - sync_interval: 1000 # 1000 seconds per sync with the network - timescale: 'min' # 'sec', 'min', 'hour', 'day' - stake2rate: 100 # 1 call per every N tokens staked per timescale - rate: 1 # 1 call per timescale - fn2rate: {} # function name to rate map, this overrides the default rate - -``` - -**fn2rate** - -Let's say you have a function that is really expensive, and another that just shows the info. Then you may want to weight the expensive function more heavily. This access_module allows you to specify per-function rates as follows: - -``` -fn2rate: - expensive_function: 1 # 1 call per minute per 100 tokens - cheap_function: 100 # 100 calls per minute per 100 tokens -``` - diff --git a/commune/modules/access/subspace/access_subspace.py b/commune/modules/access/subspace/access_subspace.py deleted file mode 100644 index c7b522859..000000000 --- a/commune/modules/access/subspace/access_subspace.py +++ /dev/null @@ -1,113 +0,0 @@ -import commune as c - - - - - class AccessSubspace(c.Module): - sync_time = 0 - def __init__(self, - module : Any, # the module or any python object - network: str = 'main', # mainnet - netuid: int = 0, # subnet id - sync_interval: int = 1000, # 1000 seconds per sync with the network - timescale:str = 'min', # 'sec', 'min', 'hour', 'day' - stake2rate: int = 100, # 1 call per every N tokens staked per timescale - rate: int = 1, # 1 call per timescale - base_rate: int = 0,# base level of calls per timescale (free calls) per account - fn2rate: dict = {}, # function name to rate map, this overrides the default rate - **kwargs): - - config = self.set_config(kwargs=locals()) - self.module = module - self.sync() - self.user_info = {} - - def sync(self): - sync_time = c.time() - self.sync_time - # if the sync time is greater than the sync interval, we need to sync - if sync_time > self.config.sync_interval : - self.sync_time = c.time() - self.subspace = c.module('subspace')(network=self.config.network, netuid=self.config.netuid) - self.stakes = self.subspace.stakes(fmt='j') - else: - c.print(f"Sync time {sync_time} < {self.config.sync_interval}, skipping sync") - return - - - timescale_map = {'sec': 1, 'min': 60, 'hour': 3600, 'day': 86400} - def verify(self, input:dict) -> dict: - - address = input['address'] - if c.is_admin(address): - - return input - - if self.subspace == None: - return {'success': False, 'error': 'subspace is not initialized, perhaps due to a network error, please check your network'} - - # if not an admin address, we need to check the whitelist and blacklist - fn = input.get('fn') - - - assert fn in self.module.whitelist or fn in c.helper_whitelist, f"Function {fn} not in whitelist" - assert fn not in self.module.blacklist, f"Function {fn} is blacklisted" - - # RATE LIMIT CHECKING HERE - self.sync() - - stake = self.stakes.get(address, 0) - # get the rate limit for the function - if fn in self.config.fn2rate: - rate = self.config.fn2rate[fn] - else: - rate = self.config.rate - rate_limit = (stake / self.config.stake2rate) - rate_limit = rate_limit + self.config.base_rate - - # convert the rate limit to the correct timescale - 
rate_limit = rate_limit / self.timescale_map[self.config.timescale] - - - # if 'fn' self.config.fn2rate: - # # if the function is in the weight map, we need to check the weight - # # get the weight of the function - # weight = self.fn2weight.get(fn, 1) - # # multiply the rate limit by the weight - # rate_limit = rate_limit * weight - - default_user_info = { - 'requests': 0, - 'last_time_called': 0, - 'rate': 0, - 'stake': stake - } - - - user_info = self.user_info.get(address, default_user_info) - user_rate = 1 / (c.time() - user_info['last_time_called'] + 1e-10) - assert user_rate < rate_limit, f"Rate limit too high (calls per second) {user_rate} > {rate_limit}" - # update the user info - user_info['last_time_called'] = c.time() - user_info['requests'] += 1 - user_info['rate'] = user_rate - user_info['rate_limit'] = rate_limit - - self.user_info[address] = user_info - - return input - - - @classmethod - def test(cls): - server_name = 'access_subspace.demo' - module = c.serve('module', server_name=server_name, wait_for_server=True) - client = c.connect(server_name, key='fam') - for n in range(10): - c.print(client.info(timeout=4)) - c.kill(server_name) - return {'name': server_name, 'module': module, 'client': client} - - - - diff --git a/commune/modules/agent/agent.py b/commune/modules/agent/agent.py deleted file mode 100644 index fc89578a2..000000000 --- a/commune/modules/agent/agent.py +++ /dev/null @@ -1,35 +0,0 @@ -import commune as c - -class Agent(c.Module): - def __init__(self, - name='agent', - description='This is a base agent that does nothing.', - tags=['defi', 'agent'], - llm = 'openai::gpt4', - tools=[] - ): - self.name = name - self.description = description - self.tags = tags - self.llm = llm - self.tools = tools - - - def call(self, prompt:str) -> str: - return { - 'prompt': prompt, - 'response': 'This is a base agent that does nothing.', - 'history': [] - } - - # prompt tooling - - - @classmethod - def find_tools(cls, prompt:str): - raise NotImplementedError - - @classmethod - def prompt2agent(cls, prompt:str) -> 'Agent': - cls.find_tools(prompt, topk=5) - diff --git a/commune/modules/archive/agent/judge/judge.py b/commune/modules/archive/agent/judge/judge.py deleted file mode 100644 index e6711194c..000000000 --- a/commune/modules/archive/agent/judge/judge.py +++ /dev/null @@ -1,77 +0,0 @@ -import commune -st = commune.get_streamlit() - - - - -import commune - - - -# llm = OpenAI(temperature=0, model_name="text-davinci-002") -# react = initialize_agent(tools, llm, agent="react-docstore", verbose=True) - -from typing import * - - -import commune -class Judge(commune.Module): - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.set_tokenizer(self.config['tokenizer']) - self.set_llm(self.config['llm']) - def set_llm(self, llm = None): - from langchain import OpenAI - try: - import openai - except ModuleNotFoundError: - self.run_command('pip install openai') - import openai - self.llm = self.launch(**llm) - def set_tokenizer(self, tokenizer: str): - - if tokenizer == None and hasattr(self, 'tokenizer'): - return self.tokenizer - - from transformers import AutoTokenizer - - if isinstance(tokenizer, str): - self.config['tokenizer'] = tokenizer - try: - tokenizer = AutoTokenizer.from_pretrained(tokenizer, use_fast= True) - except ValueError: - print('resorting ot use_fast = False') - tokenizer = AutoTokenizer.from_pretrained(tokenizer, use_fast=False) - - self.tokenizer = tokenizer - - - return self.tokenizer - def num_tokens(self, text:str, tokenizer: str = 
None, **kwargs) -> int: - self.set_tokenizer(tokenizer) - assert hasattr(self, 'tokenizer') - return len(self.tokenizer(text,**kwargs)['input_ids']) - @property - def price_per_token(self): - return self.config['price_per_token'] - def set_price_per_token(self, price_per_token:float = 0.0001) -> float: - assert isinstance(price_per_token, float) - self.config['price_per_token'] = price_per_token - return price_per_token - @classmethod - def sandbox(cls): - self = cls() - st.write(self.num_tokens('bro whadup, how is it going fam whadup')) - st.write(self.llm) - - - - -if __name__ == "__main__": - - Judge.run() - - - - - diff --git a/commune/modules/archive/agent/judge/judge.yaml b/commune/modules/archive/agent/judge/judge.yaml deleted file mode 100644 index c9cda2250..000000000 --- a/commune/modules/archive/agent/judge/judge.yaml +++ /dev/null @@ -1,8 +0,0 @@ -module: Judge -tokenizer: gpt2 -price_per_token: 0.0001 -llm: - module: langchain.OpenAI - kwargs: - openai_api_key: OPEN_API_KEY - model: local \ No newline at end of file diff --git a/commune/modules/archive/agent/knowledge_graph/knowledge_graph.py b/commune/modules/archive/agent/knowledge_graph/knowledge_graph.py deleted file mode 100644 index 0b269a3a8..000000000 --- a/commune/modules/archive/agent/knowledge_graph/knowledge_graph.py +++ /dev/null @@ -1,50 +0,0 @@ -import commune - - - -prompt = """ -Create the most important n relation tuples as follows (head, relation, tail) from the following text. Also, : -Ouput in a json format under {key} - - {key}: List[Dict(head:str, relation:str, tail:str)] - -} -""" - - - - -class KnowledgeGraph(commune.Module): - - - prompt = prompt - - - def set_prompt(self, prompt: str = None): - - if prompt is None: - prompt = self.prompt - - self.prompt = prompt - - - def set_model(self, model: 'Model'= None): - module = self.config.model.module - model_kwargs = model - - self.model = commune.module(module)(**model_kwargs) - - - def forward(self, **kwargs): - return self.model(**kwargs) - - - @classmethod - def sandbox(cls): - self = cls() - - -if __name__ == "__name__": - KnowledgeGraph.sandbox() - - diff --git a/commune/modules/archive/agent/knowledge_graph/knowledge_graph.yaml b/commune/modules/archive/agent/knowledge_graph/knowledge_graph.yaml deleted file mode 100644 index 36dbca640..000000000 --- a/commune/modules/archive/agent/knowledge_graph/knowledge_graph.yaml +++ /dev/null @@ -1,14 +0,0 @@ - -model: - module : 'model.openai' - model : "text-davinci-003", - temperature: 0.9, - max_tokens: 10, - top_p: 1.0, - frequency_penalty: 0.0, - presence_penalty : 0.0, - prompt : None, - key : 'OPENAI_API_KEY' - - -prompt: null diff --git a/commune/modules/archive/dashboard/dashboard.py b/commune/modules/archive/dashboard/dashboard.py deleted file mode 100644 index 623ce9149..000000000 --- a/commune/modules/archive/dashboard/dashboard.py +++ /dev/null @@ -1,248 +0,0 @@ -import commune -import streamlit as st -from typing import List, Dict, Union, Any -import os - -class Dashboard(commune.Module): - - def __init__(self): - self.public_ip = commune.external_ip() - self.load_state() - - def load_state(self): - self.namespace = commune.namespace(update=False) - self.servers = list(self.namespace.keys()) - self.module_tree = commune.module_tree() - self.module_list = ['module'] + list(self.module_tree.keys()) - sorted(self.module_list) - - def sync(self): - self.subspace.sync() - return self.subspace.load_state() - - @classmethod - def function2streamlit(cls, - fn_schema, - extra_defaults:dict=None, - 
cols:list=None): - if extra_defaults is None: - extra_defaults = {} - - st.write('#### Startup Arguments') - # refresh = st.checkbox('**Refresh**', False) - # mode = st.selectbox('**Select Mode**', ['pm2', 'ray', 'local'] ) - mode = 'pm2' - serve = True - - kwargs = {} - fn_schema['default'].pop('self', None) - fn_schema['default'].pop('cls', None) - fn_schema['default'].update(extra_defaults) - - - - - - fn_schema['input'].update({k:str(type(v)).split("'")[1] for k,v in extra_defaults.items()}) - if cols == None: - cols = st.columns(len(fn_schema['default'])) - - for i, (k,v) in enumerate(fn_schema['default'].items()): - - optional = fn_schema['default'][k] != 'NA' - fn_key = k - if k in fn_schema['input']: - k_type = fn_schema['input'][k] - if 'Munch' in k_type or 'Dict' in k_type: - k_type = 'Dict' - if k_type.startswith('typing'): - k_type = k_type.split('.')[-1] - fn_key = f'**{k} ({k_type}){"" if optional else "(REQUIRED)"}**' - col_idx = i - if k in ['kwargs', 'args'] and v == 'NA': - continue - - - - col_idx = col_idx % (len(cols)) - kwargs[k] = cols[col_idx].text_input(fn_key, v) - - return kwargs - - - @classmethod - def process_kwargs(cls, kwargs:dict, fn_schema:dict): - - for k,v in kwargs.items(): - if v == 'None': - v = None - - elif k in fn_schema['input'] and fn_schema['input'][k] == 'str': - if v.startswith("f'") or v.startswith('f"'): - v = c.ljson(v) - elif v.startswith('[') and v.endswith(']'): - v = v - elif v.startswith('{') and v.endswith('}'): - v = v - else: - v = v - - elif k == 'kwargs': - continue - elif v == 'NA': - assert k != 'NA', f'Key {k} not in default' - else: - v = eval(v) - - kwargs[k] = v - return kwargs - @classmethod - def streamlit_module_launcher(cls, module, mode:str='pm2', fn_name='__init__'): - - module_path = module.module_path() - st.write(f'## Module: {module.module_path()}') - - # function_signature = self.module_info['function_signature_map'] = self.module_info['object'].get_function_signature_map() - module_schema = module.schema(defaults=True) - - cols = st.columns(2) - name = cols[0].text_input('**Name**', module_path) - tag = cols[1].text_input('**Tag**', 'None') - config = module.config(to_munch=False) - - - fn_schema = module_schema[fn_name] - kwargs = cls.function2streamlit(fn_schema=fn_schema, extra_defaults=config ) - - launch_button = st.button('Launch Module') - - if launch_button: - - kwargs = cls.process_kwargs(kwargs=kwargs, fn_schema=fn_schema) - - - - launch_kwargs = dict( - module = module, - name = name, - tag = tag, - mode = mode, - refresh = True, - kwargs = kwargs, - ) - commune.launch(**launch_kwargs) - st.success(f'Launched {name} with {kwargs}') - - - with st.expander('Config'): - st.write(module.config()) - - with st.expander('input'): - st.write(module.schema()) - - - - - def streamlit_sidebar(self, wrapper = True): - if wrapper: - with st.sidebar: - return self.streamlit_sidebar(False) - - - - st.write('## Modules') - self.server_name = st.selectbox('',self.module_list, 0, key='module_name') - self.module = commune.module(self.server_name) - self.module_config = self.module.config(to_munch=False) - - - with st.expander('Modules'): - modules =commune.modules() - st.multiselect('', modules, modules) - - - self.update_button = st.button('Sync Network', False) - - if self.update_button: - self.sync() - - self.streamlit_peers() - - def st_root_module(self): - self.root_module = commune.root_module() - self.root_module_info = self.root_module.info() - - st.write('## My Address:', self.root_module_info["address"]) - - 
def streamlit_peers(self): - - st.write('## Peers') - - peer = st.text_input('', '0.0.0.0:8888', key='peer') - cols = st.columns(2) - add_peer_button = cols[0].button('add Peer') - rm_peer_button = cols[1].button('rm peer') - if add_peer_button : - self.add_peer(peer) - if rm_peer_button : - self.rm_peer(peer) - - peers = commune.peers() - st.write(peers) - - - - - - - - def streamlit_server_info(self): - - - for peer_name, peer_info in self.namespace.items(): - with st.expander(peer_name, True): - peer_info['address']= f'{peer_info["ip"]}:{peer_info["port"]}' - st.write(peer_info) - - - - - - - - peer_info_map = {} - - - def streamlit_playground(self): - class bro: - def __init__(self, a, b): - self.a = a - self.b = b - - st.write(str(type(commune)) == "") - st.write() - pass - - @classmethod - def streamlit(cls): - - cls.local_css() - commune.new_event_loop() - - commune.nest_asyncio() - self = cls() - self.st_root_module() - - - self.streamlit_sidebar() - - - tabs = st.tabs(['Modules', 'Peers', 'Users', 'Playground']) - - self.streamlit_module_launcher(module=self.module) - - - - -if __name__ == '__main__': - Dashboard.streamlit() \ No newline at end of file diff --git a/commune/modules/archive/dashboard/style.css b/commune/modules/archive/dashboard/style.css deleted file mode 100644 index dffc6dfeb..000000000 --- a/commune/modules/archive/dashboard/style.css +++ /dev/null @@ -1,8 +0,0 @@ -div.stButton button { - background-color: rgb(34, 220, 124); - width: 100%; - color : #000; - border: 1px solid rgb(0, 0, 0); - font-size: 20px; - font-weight: 200; -} \ No newline at end of file diff --git a/commune/modules/archive/fastchat/.gitignore b/commune/modules/archive/fastchat/.gitignore deleted file mode 100644 index bff0729c3..000000000 --- a/commune/modules/archive/fastchat/.gitignore +++ /dev/null @@ -1,21 +0,0 @@ -# Python -__pycache__ -*.pyc -*.egg-info -dist - -# Log -*.log -*.json - -# Data -!**/alpaca-data-conversation.json - -# Editor -.idea -*.swp - -# Other -.DS_Store -wandb -output diff --git a/commune/modules/archive/fastchat/README.md b/commune/modules/archive/fastchat/README.md deleted file mode 100644 index 85f06b6dd..000000000 --- a/commune/modules/archive/fastchat/README.md +++ /dev/null @@ -1,267 +0,0 @@ -# FastChat -An open platform for training, serving, and evaluating large language model based chatbots. - -## Release - -

- -

- -- 🔥 We released **Vicuna: An Open-Source Chatbot Impressing GPT-4 with 90% ChatGPT Quality**. Checkout the blog [post](https://vicuna.lmsys.org) and [demo](https://chat.lmsys.org/). - - - -Join our [Discord](https://discord.gg/h6kCZb72G7) server and follow our [Twitter](https://twitter.com/lmsysorg) to get the latest updates. - -## Contents -- [Install](#install) -- [Vicuna Weights](#vicuna-weights) -- [Inference with Command Line Interface](#inference-with-command-line-interface) -- [Serving with Web GUI](#serving-with-web-gui) -- [API](#api) -- [Evaluation](#evaluation) -- [Fine-tuning](#fine-tuning) - -## Install - -### Method 1: With pip - -```bash -# Install FastChat -pip3 install fschat - -# Install the latest main branch of huggingface/transformers -pip3 install git+https://github.com/huggingface/transformers -``` - -### Method 2: From source - -1. Clone this repository and navigate to the FastChat folder -```bash -git clone https://github.com/lm-sys/FastChat.git -cd FastChat -``` - -If you are running on Mac: -```bash -brew install rust cmake -``` - -2. Install Package -```bash -pip3 install --upgrade pip # enable PEP 660 support -pip3 install -e . -``` - -## Vicuna Weights -We release [Vicuna](https://vicuna.lmsys.org/) weights as delta weights to comply with the LLaMA model license. -You can add our delta to the original LLaMA weights to obtain the Vicuna weights. Instructions: - -1. Get the original LLaMA weights in the huggingface format by following the instructions [here](https://huggingface.co/docs/transformers/main/model_doc/llama). -2. Use the following scripts to get Vicuna weights by applying our delta. They will automatically download delta weights from our Hugging Face [account](https://huggingface.co/lmsys). - -**NOTE**: -Our released weights are only compatible with the latest main branch of huggingface/transformers. -We install the correct version of transformers when fastchat is installed. - -### Vicuna-7B -This conversion command needs around 30 GB of CPU RAM. -If you do not have enough memory, you can create a large swap file that allows the operating system to automatically utilize the disk as virtual memory. -```bash -python3 -m fastchat.model.apply_delta \ - --base /path/to/llama-7b \ - --target /output/path/to/vicuna-7b \ - --delta lmsys/vicuna-7b-delta-v1.1 -``` - -### Vicuna-13B -This conversion command needs around 60 GB of CPU RAM. -If you do not have enough memory, you can create a large swap file that allows the operating system to automatically utilize the disk as virtual memory. -```bash -python3 -m fastchat.model.apply_delta \ - --base /path/to/llama-13b \ - --target /output/path/to/vicuna-13b \ - --delta lmsys/vicuna-13b-delta-v1.1 -``` - -### Old weights -See [docs/weights_version.md](docs/weights_version.md) for all versions of weights and their differences. - -## Inference with Command Line Interface - -(Experimental Feature: You can specify `--style rich` to enable rich text output and better text streaming quality for some non-ASCII content. This may not work properly on certain terminals.) - - - -#### Single GPU -The command below requires around 28GB of GPU memory for Vicuna-13B and 14GB of GPU memory for Vicuna-7B. -See the "No Enough Memory" section below if you do not have enough memory. -``` -python3 -m fastchat.serve.cli --model-path /path/to/vicuna/weights -``` - -#### Multiple GPUs -You can use model parallelism to aggregate GPU memory from multiple GPUs on the same machine. 
-``` -python3 -m fastchat.serve.cli --model-path /path/to/vicuna/weights --num-gpus 2 -``` - -#### CPU Only -This runs on the CPU only and does not require GPU. It requires around 60GB of CPU memory for Vicuna-13B and around 30GB of CPU memory for Vicuna-7B. -``` -python3 -m fastchat.serve.cli --model-path /path/to/vicuna/weights --device cpu -``` - -#### Metal Backend (Mac Computers with Apple Silicon or AMD GPUs) -Use `--device mps` to enable GPU acceleration on Mac computers (requires torch >= 2.0). -Use `--load-8bit` to turn on 8-bit compression. -``` -python3 -m fastchat.serve.cli --model-path /path/to/vicuna/weights --device mps --load-8bit -``` -Vicuna-7B can run on a 32GB M1 Macbook with 1 - 2 words / second. - - -#### No Enough Memory or Other Platforms -If you do not have enough memory, you can enable 8-bit compression by adding `--load-8bit` to commands above. -This can reduce memory usage by around half with slightly degraded model quality. -It is compatible with the CPU, GPU, and Metal backend. -Vicuna-13B with 8-bit compression can run on a single NVIDIA 3090/4080/V100(16GB) GPU. - -``` -python3 -m fastchat.serve.cli --model-path /path/to/vicuna/weights --load-8bit -``` - -Besides, we are actively exploring more methods to make the model easier to run on more platforms. -Contributions and pull requests are welcome. - -## Serving with Web GUI - - - -To serve using the web UI, you need three main components: web servers that interface with users, model workers that host one or more models, and a controller to coordinate the webserver and model workers. Here are the commands to follow in your terminal: - -#### Launch the controller -```bash -python3 -m fastchat.serve.controller -``` - -This controller manages the distributed workers. - -#### Launch the model worker -```bash -python3 -m fastchat.serve.model_worker --model-path /path/to/vicuna/weights -``` -Wait until the process finishes loading the model and you see "Uvicorn running on ...". You can launch multiple model workers to serve multiple models concurrently. The model worker will connect to the controller automatically. - -To ensure that your model worker is connected to your controller properly, send a test message using the following command: -```bash -python3 -m fastchat.serve.test_message --model-name vicuna-13b -``` - -#### Launch the Gradio web server -```bash -python3 -m fastchat.serve.gradio_web_server -``` - -This is the user interface that users will interact with. - -By following these steps, you will be able to serve your models using the web UI. You can open your browser and chat with a model now. - - -## API - -### Huggingface Generation APIs -See [fastchat/serve/huggingface_api.py](fastchat/serve/huggingface_api.py) - -### OpenAI-compatible RESTful APIs -Coming soon. - -## Evaluation - -Our AI-enhanced evaluation pipeline is based on GPT-4. This section provides a high-level summary of the pipeline. For detailed instructions, please refer to the [evaluation](fastchat/eval) documentation. - -### Pipeline Steps - -1. Generate answers from different models: Use `qa_baseline_gpt35.py` for ChatGPT, or specify the model checkpoint and run `get_model_answer.py` for Vicuna and other models. - -2. Generate reviews with GPT-4: Use GPT-4 to generate reviews automatically. This step can also be performed manually if the GPT-4 API is not available to you. - -3. 
Generate visualization data: Run `generate_webpage_data_from_table.py` to generate data for a static website, which allows you to visualize the evaluation data. - -4. Visualize the data: Serve a static website under the `webpage` directory. You can use `python3 -m http.server` to serve the website locally. - -### Data Format and Contribution - -We use a data format encoded with JSON Lines for evaluation. The format includes information on models, prompts, reviewers, questions, answers, and reviews. - -You can customize the evaluation process or contribute to our project by accessing the relevant [data](fastchat/eval/table/). - -For detailed instructions, please refer to the [evaluation](fastchat/eval) documentation. - -## Fine-tuning -### Data - -Vicuna is created by fine-tuning a LLaMA base model using approximately 70K user-shared conversations gathered from ShareGPT.com with public APIs. To ensure data quality, we convert the HTML back to markdown and filter out some inappropriate or low-quality samples. Additionally, we divide lengthy conversations into smaller segments that fit the model's maximum context length. For detailed instructions to clean the ShareGPT data, check out [here](docs/commands/data_cleaning.md). - -Due to some concerns, we may not release the ShareGPT dataset at the moment. If you would like to try the fine-tuning code, you can run it with some dummy questions in [dummy.json](playground/data/dummy.json). You can follow the same format and plug in your own data. - -### Code and Hyperparameters -Our code is based on [Stanford Alpaca](https://github.com/tatsu-lab/stanford_alpaca) with additional support for multi-round conversations. -We use similar hyperparameters as the Stanford Alpaca. - -| Hyperparameter | Global Batch Size | Learning rate | Epochs | Max length | Weight decay | -| --- | ---: | ---: | ---: | ---: | ---: | -| Vicuna-13B | 128 | 2e-5 | 3 | 2048 | 0 | - -### Fine-tuning Vicuna-7B with Local GPUs -You can use the following command to train Vicuna-7B with 4 x A100 (40GB). -```bash -torchrun --nproc_per_node=4 --master_port=20001 fastchat/train/train_mem.py \ - --model_name_or_path ~/model_weights/llama-7b \ - --data_path playground/data/dummy.json \ - --bf16 True \ - --output_dir output \ - --num_train_epochs 3 \ - --per_device_train_batch_size 2 \ - --per_device_eval_batch_size 2 \ - --gradient_accumulation_steps 16 \ - --evaluation_strategy "no" \ - --save_strategy "steps" \ - --save_steps 1200 \ - --save_total_limit 10 \ - --learning_rate 2e-5 \ - --weight_decay 0. \ - --warmup_ratio 0.03 \ - --lr_scheduler_type "cosine" \ - --logging_steps 1 \ - --fsdp "full_shard auto_wrap" \ - --fsdp_transformer_layer_cls_to_wrap 'LlamaDecoderLayer' \ - --tf32 True \ - --model_max_length 2048 \ - --gradient_checkpointing True \ - --lazy_preprocess True -``` - -If you meet out-of-memory during model saving, see solutions [here](https://github.com/pytorch/pytorch/issues/98823). - -### Fine-tuning on Any Cloud with SkyPilot -[SkyPilot](https://github.com/skypilot-org/skypilot) is a framework built by UC Berkeley for easily and cost effectively running ML workloads on any cloud (AWS, GCP, Azure, Lambda, etc.). -To use SkyPilot, install it with the following command and setup the cloud credentials locally following the instructions [here](https://skypilot.readthedocs.io/en/latest/getting-started/installation.html). 
-```bash -# Install skypilot from the master branch -pip install git+https://github.com/skypilot-org/skypilot.git -``` -#### Vicuna -Vicuna can be trained on 8 A100 GPUs with 80GB memory. The following command will automatically launch a node satisfying the requirement, setup and run the training job on it. -```bash -sky launch -c vicuna -s scripts/train-vicuna.yaml --env WANDB_API_KEY -``` -Other options are also valid: -```bash -# Launch it on managed spot to save 3x cost (train Vicuna-13B with around $300) -sky spot launch -n vicuna scripts/train-vicuna.yaml --env WANDB_API_KEY - -# Train a 7B model -sky launch -c vicuna -s scripts/train-vicuna.yaml --env WANDB_API_KEY --env MODEL_SIZE=7 -``` -Note: Please make sure the `WANDB_API_KEY` has been setup on your local machine. You can find the API key on your [wandb profile page](https://wandb.ai/authorize). If you would like to train the model without using wandb, you can replace the `--env WANDB_API_KEY` flag with `--env WANDB_MODE=offline`. diff --git a/commune/modules/archive/fastchat/assets/demo_narrow.gif b/commune/modules/archive/fastchat/assets/demo_narrow.gif deleted file mode 100644 index aa1af38e4..000000000 Binary files a/commune/modules/archive/fastchat/assets/demo_narrow.gif and /dev/null differ diff --git a/commune/modules/archive/fastchat/assets/vicuna_logo.jpeg b/commune/modules/archive/fastchat/assets/vicuna_logo.jpeg deleted file mode 100644 index e7883dc88..000000000 Binary files a/commune/modules/archive/fastchat/assets/vicuna_logo.jpeg and /dev/null differ diff --git a/commune/modules/archive/fastchat/docs/commands/data_cleaning.md b/commune/modules/archive/fastchat/docs/commands/data_cleaning.md deleted file mode 100644 index e6b3ae28f..000000000 --- a/commune/modules/archive/fastchat/docs/commands/data_cleaning.md +++ /dev/null @@ -1,19 +0,0 @@ -## Data cleaning - -## Requirements -``` -pip3 install bs4 markdownify -pip3 install polyglot icu pyicu pycld2 morfessor -``` - -## Steps -``` -# Convert html to markdown -python3 -m fastchat.data.clean_sharegpt --in sharegpt_20230322_html.json --out sharegpt_20230322_clean.json - -# Keep or remove specific languages -python3 -m fastchat.data.optional_clean --in sharegpt_20230322_clean.json --out sharegpt_20230322_clean_lang.json --skip-lang SOME_LANGUAGE_CODE - -# Split long conversations -python3 -m fastchat.data.split_long_conversation --in sharegpt_20230322_clean_lang.json --out sharegpt_20230322_clean_lang_split.json --model-name /home/ubuntu/model_weights/llama-7b/ -``` diff --git a/commune/modules/archive/fastchat/docs/commands/local_cluster.md b/commune/modules/archive/fastchat/docs/commands/local_cluster.md deleted file mode 100644 index 6be24eac7..000000000 --- a/commune/modules/archive/fastchat/docs/commands/local_cluster.md +++ /dev/null @@ -1,30 +0,0 @@ -### Local GPU cluster (node-01) -``` -python3 -m fastchat.serve.controller --host 0.0.0.0 --port 10002 - -CUDA_VISIBLE_DEVICES=0 python3 -m fastchat.serve.model_worker --model-path ~/model_weights/vicuna-13b/ --controller http://localhost:10002 --port 31000 --worker http://localhost:31000 -CUDA_VISIBLE_DEVICES=1 python3 -m fastchat.serve.model_worker --model-path ~/model_weights/vicuna-13b/ --controller http://localhost:10002 --port 31001 --worker http://localhost:31001 -CUDA_VISIBLE_DEVICES=2 python3 -m fastchat.serve.model_worker --model-path ~/model_weights/bair-chat-13b/ --controller http://localhost:10002 --port 31002 --worker http://localhost:31002 -CUDA_VISIBLE_DEVICES=3 python3 -m 
fastchat.serve.model_worker --model-path ~/model_weights/alpaca-chat-13b/ --controller http://localhost:10002 --port 31003 --worker http://localhost:31003 - -python3 -m fastchat.serve.test_message --model vicuna-13b --controller http://localhost:10002 -``` - -### Web server -``` -python3 -m fastchat.serve.controller --host 0.0.0.0 --port 21001 - -python3 -m fastchat.serve.register_worker --controller http://localhost:21001 --worker-name https:// - -python3 -m fastchat.serve.test_message --model vicuna-13b --controller http://localhost:21001 - -python3 -m fastchat.serve.gradio_web_server --controller http://localhost:21001 -``` - -### Local GPU cluster (node-02) -``` -CUDA_VISIBLE_DEVICES=0 python3 -m fastchat.serve.model_worker --model-path ~/model_weights/vicuna-13b/ --controller http://node-01:10002 --host 0.0.0.0 --port 31000 --worker http://$(hostname):31000 -CUDA_VISIBLE_DEVICES=1 python3 -m fastchat.serve.model_worker --model-path ~/model_weights/vicuna-13b/ --controller http://node-01:10002 --host 0.0.0.0 --port 31001 --worker http://$(hostname):31001 -CUDA_VISIBLE_DEVICES=2 python3 -m fastchat.serve.model_worker --model-path ~/model_weights/vicuna-13b/ --controller http://node-01:10002 --host 0.0.0.0 --port 31002 --worker http://$(hostname):31002 -CUDA_VISIBLE_DEVICES=3 python3 -m fastchat.serve.model_worker --model-path ~/model_weights/vicuna-13b/ --controller http://node-01:10002 --host 0.0.0.0 --port 31003 --worker http://$(hostname):31003 -``` diff --git a/commune/modules/archive/fastchat/docs/commands/pypi.md b/commune/modules/archive/fastchat/docs/commands/pypi.md deleted file mode 100644 index 5b53dae6b..000000000 --- a/commune/modules/archive/fastchat/docs/commands/pypi.md +++ /dev/null @@ -1,11 +0,0 @@ -### Requirement -``` -python3 -m pip install twine -python3 -m pip install --upgrade pip -pip3 install build -``` - -### Upload -``` -bash scripts/upload_pypi.sh -``` diff --git a/commune/modules/archive/fastchat/docs/commands/webserver.md b/commune/modules/archive/fastchat/docs/commands/webserver.md deleted file mode 100644 index 800406116..000000000 --- a/commune/modules/archive/fastchat/docs/commands/webserver.md +++ /dev/null @@ -1,40 +0,0 @@ -### Install -``` -sudo apt update -sudo apt install tmux htop - -wget https://repo.anaconda.com/archive/Anaconda3-2022.10-Linux-x86_64.sh -bash Anaconda3-2022.10-Linux-x86_64.sh - -conda create -n fastchat python=3.9 -conda activate fastchat - -git clone https://github.com/lm-sys/FastChat.git -cd FastChat -pip3 install -e . 
- -# Install the latest main branch of huggingface/transformers -pip3 install git+https://github.com/huggingface/transformers -``` - -### Launch servers -``` -python3 -m fastchat.serve.controller --host 0.0.0.0 --port 21001 -python3 -m fastchat.serve.register_worker --controller http://localhost:21001 --worker-name https:// -python3 -m fastchat.serve.test_message --model vicuna-13b --controller http://localhost:21001 - -export OPENAI_API_KEY= -python3 -m fastchat.serve.gradio_web_server --controller http://localhost:21001 --moderate --concurrency 20 -``` - -### Increase the limit of max open files -One process (do not need reboot) -``` -sudo prlimit --nofile=1048576:1048576 --pid=$id -``` - -System (need reboot): Add the lines below to `/etc/security/limits.conf` -``` -* hard nofile 65535 -* soft nofile 65535 -``` diff --git a/commune/modules/archive/fastchat/docs/weights_version.md b/commune/modules/archive/fastchat/docs/weights_version.md deleted file mode 100644 index df9da8114..000000000 --- a/commune/modules/archive/fastchat/docs/weights_version.md +++ /dev/null @@ -1,35 +0,0 @@ -## Vicuna-7B/13B - -| Weights Version | v1.1 | v0 | -| ---- | ---- | ---- | -| Link | [7B](https://huggingface.co/lmsys/vicuna-7b-delta-v1.1), [13B](https://huggingface.co/lmsys/vicuna-13b-delta-v1.1) | [7B](https://huggingface.co/lmsys/vicuna-7b-delta-v0), [13B](https://huggingface.co/lmsys/vicuna-13b-delta-v0) | -| Separator | `` | `###` | -| FastChat PyPI package compatibility | >= v0.2.0 |<= v0.1.10 | - -Major updates of Weights v1.1 -- Refactor the tokenization and separator. In Vicuna v1.1, the separator has been changed from `###` to the EOS token ``. This change makes it easier to determine the generation stop criteria and enables better compatibility with other libraries. -- Fix the supervised fine-tuning loss computation for better model quality. - -### Example prompt (Weight v1.1) -``` -A chat between a user and an assistant. - -USER: Hello! -ASSISTANT: Hello! -USER: How are you? -ASSISTANT: I am good. -``` - -See a full prompt template [here](https://github.com/lm-sys/FastChat/blob/00d9e6675bdff60be6603ffff9313b1d797d2e3e/fastchat/conversation.py#L115-L124). - -### Example prompt (Weight v0) -``` -A chat between a human and an assistant. - -### Human: Hello! -### Assistant: Hello! -### Human: How are you? -### Assistant: I am good. -``` - -See the full prompt template [here](https://github.com/lm-sys/FastChat/blob/00d9e6675bdff60be6603ffff9313b1d797d2e3e/fastchat/conversation.py#L83-L112). 
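The v1.1 format above replaces the `###` separator with two alternating separators: a plain space after each user turn and the model's EOS token after each assistant turn. The following is a minimal sketch of that assembly logic, assuming LLaMA's `</s>` string as the EOS separator (the literal token is not shown in the table above) and using a hypothetical `build_v1_1_prompt` helper; it mirrors the two-separator style described here rather than any packaged FastChat API.

```python
# Minimal sketch of the v1.1 two-separator prompt assembly described above.
# Assumptions: sep=" " follows user turns, sep2="</s>" (assumed LLaMA EOS token)
# follows assistant turns; build_v1_1_prompt is a hypothetical helper.
from typing import List, Optional, Tuple


def build_v1_1_prompt(system: str,
                      turns: List[Tuple[str, Optional[str]]],
                      sep: str = " ",
                      sep2: str = "</s>") -> str:
    seps = [sep, sep2]
    prompt = system + seps[0]
    for i, (role, message) in enumerate(turns):
        if message is None:
            # Leave the final assistant slot open for generation.
            prompt += role + ":"
        else:
            prompt += role + ": " + message + seps[i % 2]
    return prompt


if __name__ == "__main__":
    demo = [
        ("USER", "Hello!"),
        ("ASSISTANT", "Hello!"),
        ("USER", "How are you?"),
        ("ASSISTANT", None),
    ]
    print(build_v1_1_prompt("A chat between a user and an assistant.", demo))
```

Because the assistant turns end with the EOS separator, generation can simply stop at EOS, which is what makes the v1.1 stop criterion easier to determine than matching the literal `###` string.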
diff --git a/commune/modules/archive/fastchat/fastchat.py b/commune/modules/archive/fastchat/fastchat.py deleted file mode 100644 index 2f61d7a31..000000000 --- a/commune/modules/archive/fastchat/fastchat.py +++ /dev/null @@ -1,8 +0,0 @@ -import commune as c - -class Fastchat(c.Module): - def __init__(self, **kwargs): - config = self.set_config(config=kwargs) - def run(self): - print('Base run') - \ No newline at end of file diff --git a/commune/modules/archive/fastchat/fastchat.yaml b/commune/modules/archive/fastchat/fastchat.yaml deleted file mode 100644 index 7ac5c5e33..000000000 --- a/commune/modules/archive/fastchat/fastchat.yaml +++ /dev/null @@ -1,6 +0,0 @@ -return_val: pong -users: -- user1 -- user2 -- user3 -- bro diff --git a/commune/modules/archive/fastchat/fastchat/constants.py b/commune/modules/archive/fastchat/fastchat/constants.py deleted file mode 100644 index 70294c04e..000000000 --- a/commune/modules/archive/fastchat/fastchat/constants.py +++ /dev/null @@ -1,4 +0,0 @@ -CONTROLLER_HEART_BEAT_EXPIRATION = 90 -WORKER_HEART_BEAT_INTERVAL = 30 - -LOGDIR = "." diff --git a/commune/modules/archive/fastchat/fastchat/conversation.py b/commune/modules/archive/fastchat/fastchat/conversation.py deleted file mode 100644 index ccb42a496..000000000 --- a/commune/modules/archive/fastchat/fastchat/conversation.py +++ /dev/null @@ -1,155 +0,0 @@ -import dataclasses -from enum import auto, Enum -from typing import List, Tuple, Any - - -class SeparatorStyle(Enum): - """Different separator style.""" - SINGLE = auto() - TWO = auto() - - -@dataclasses.dataclass -class Conversation: - """A class that keeps all conversation history.""" - system: str - roles: List[str] - messages: List[List[str]] - offset: int - sep_style: SeparatorStyle = SeparatorStyle.SINGLE - sep: str = "###" - sep2: str = None - - # Used for gradio server - skip_next: bool = False - conv_id: Any = None - - def get_prompt(self): - if self.sep_style == SeparatorStyle.SINGLE: - ret = self.system - for role, message in self.messages: - if message: - ret += self.sep + " " + role + ": " + message - else: - ret += self.sep + " " + role + ":" - return ret - elif self.sep_style == SeparatorStyle.TWO: - seps = [self.sep, self.sep2] - ret = self.system + seps[0] - for i, (role, message) in enumerate(self.messages): - if message: - ret += role + ": " + message + seps[i % 2] - else: - ret += role + ":" - return ret - else: - raise ValueError(f"Invalid style: {self.sep_style}") - - def append_message(self, role, message): - self.messages.append([role, message]) - - def to_gradio_chatbot(self): - ret = [] - for i, (role, msg) in enumerate(self.messages[self.offset:]): - if i % 2 == 0: - ret.append([msg, None]) - else: - ret[-1][-1] = msg - return ret - - def copy(self): - return Conversation( - system=self.system, - roles=self.roles, - messages=[[x, y] for x, y in self.messages], - offset=self.offset, - sep_style=self.sep_style, - sep=self.sep, - sep2=self.sep2, - conv_id=self.conv_id) - - def dict(self): - return { - "system": self.system, - "roles": self.roles, - "messages": self.messages, - "offset": self.offset, - "sep": self.sep, - "sep2": self.sep2, - "conv_id": self.conv_id, - } - - -conv_one_shot = Conversation( - system="A chat between a curious human and an artificial intelligence assistant. 
" - "The assistant gives helpful, detailed, and polite answers to the human's questions.", - roles=("Human", "Assistant"), - messages=( - ("Human", "What are the key differences between renewable and non-renewable energy sources?"), - ("Assistant", - "Renewable energy sources are those that can be replenished naturally in a relatively " - "short amount of time, such as solar, wind, hydro, geothermal, and biomass. " - "Non-renewable energy sources, on the other hand, are finite and will eventually be " - "depleted, such as coal, oil, and natural gas. Here are some key differences between " - "renewable and non-renewable energy sources:\n" - "1. Availability: Renewable energy sources are virtually inexhaustible, while non-renewable " - "energy sources are finite and will eventually run out.\n" - "2. Environmental impact: Renewable energy sources have a much lower environmental impact " - "than non-renewable sources, which can lead to air and water pollution, greenhouse gas emissions, " - "and other negative effects.\n" - "3. Cost: Renewable energy sources can be more expensive to initially set up, but they typically " - "have lower operational costs than non-renewable sources.\n" - "4. Reliability: Renewable energy sources are often more reliable and can be used in more remote " - "locations than non-renewable sources.\n" - "5. Flexibility: Renewable energy sources are often more flexible and can be adapted to different " - "situations and needs, while non-renewable sources are more rigid and inflexible.\n" - "6. Sustainability: Renewable energy sources are more sustainable over the long term, while " - "non-renewable sources are not, and their depletion can lead to economic and social instability.") - ), - offset=2, - sep_style=SeparatorStyle.SINGLE, - sep="###", -) - - -conv_vicuna_v1_1 = Conversation( - system="A chat between a curious user and an artificial intelligence assistant. " - "The assistant gives helpful, detailed, and polite answers to the user's questions.", - roles=("USER", "ASSISTANT"), - messages=(), - offset=0, - sep_style=SeparatorStyle.TWO, - sep=" ", - sep2="", -) - - -conv_koala_v1 = Conversation( - system="BEGINNING OF CONVERSATION:", - roles=("USER", "GPT"), - messages=(), - offset=0, - sep_style=SeparatorStyle.TWO, - sep=" ", - sep2="", -) - - -conv_templates = { - "conv_one_shot": conv_one_shot, - "vicuna_v1.1": conv_vicuna_v1_1, - "koala_v1": conv_koala_v1, -} - - -def get_default_conv_template(model_name): - model_name = model_name.lower() - if "vicuna" in model_name or "output" in model_name: - return conv_vicuna_v1_1 - elif "koala" in model_name: - return conv_koala_v1 - return conv_one_shot - - -if __name__ == "__main__": - print(default_conversation.get_prompt()) diff --git a/commune/modules/archive/fastchat/fastchat/eval/README.md b/commune/modules/archive/fastchat/fastchat/eval/README.md deleted file mode 100644 index 403c9acf2..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/README.md +++ /dev/null @@ -1,187 +0,0 @@ -# Evaluations - -This directory contains end-to-end pipelines for AI-enhanced evaluation. We will introduce the evaluation pipeline and the data format in this document. - -## Generate Answers - -### ChatGPT (gpt-3.5-turbo) - -Make sure you have setup the OpenAI API Key in your environment. Then run: - -```bash -python qa_baseline_gpt35.py --question table/question.jsonl --output table/answer/answer_gpt35.jsonl -``` - -### Bard - -Unfortunately, Bard has not release its public APIs till now. 
You may have to enter the answers manually. Or you could find a third-party project that interfaces with Bard. - -### Vicuna and others - -To generate answers with Vicuna or other models, specify the path to the model checkpoint and a desired model ID, then run: -```bash -python get_model_answer.py --model-id [MODEL-ID] --model-path /model/path --question-file table/question.jsonl --answer-file table/answer/answer.jsonl --num-gpus [NUM-GPUS] -``` -Then the answers to the questions will be saved in `table/answer/answer.jsonl`. -Note: we assume the model can be loaded with a single GPU. - -## Evaluate Answers Automatically - -### Generate Reviews with GPT-4 - -Note: The script below requires access to the GPT-4 API. If you only have access to GPT-4 through the web interface, you can evaluate the answers by manually formatting the prompt. See more details in the **Reviewers** and **Prompts** sections in **Data Format**. -It is critical to follow the prompt templates; otherwise GPT-4 may not give fair reviews. `table/review/*.jsonl` contains some review examples generated by GPT-4; you can also view them on our eval [webpage](https://vicuna.lmsys.org/eval/). - -To use the script for generating reviews with GPT-4, you need to `export` your OpenAI API key as an environment variable. Then run: -```bash -python eval_gpt_review.py -q table/question.jsonl -a /path/to/answer_1.jsonl /path/to/answer_2.jsonl -p table/prompt.jsonl -r table/reviewer.jsonl -o /path/to/review_output.jsonl -``` -The GPT-4 reviews will be saved in `/path/to/review_output.jsonl`. Note: we implement some simple parsing code to extract the score pairs from GPT-4's reviews. However, you need to double-check whether the parsed score pairs are correct. Sometimes the parsing logic may fail if GPT-4 doesn't give a structured answer. - -## Visualize Results - -You can generate the data for the webpage by running: - -```bash -python eval/generate_webpage_data_from_table.py -``` - -Then you can serve a static website in `webpage` to see the results. - -## Data Format - -If you want to have a deeper understanding of our evaluation pipeline or want to contribute to the evaluation process, you need to learn the data format we used for evaluation. - -Our evaluation data are encoded with [JSON Lines](https://jsonlines.org/). - -### Random ID Generation - -We use the `shortuuid` Python library for generating short random UUIDs. - -```python -import shortuuid -shortuuid.uuid() -> str -``` - -### Models - -`model.jsonl` contains model information we used for generating answers. - -Each row contains a record of a model with the following fields: - -* `model_id` (str): A unique ID for a model. Models with different IDs are supposed to have different performance. This ID is generated by `{model_name}:{model_version}`. -* `model_name` (str): The name of a model. This is not unique, because a model could be trained and updated continuously, but it is still considered the same model with different versions. -* `model_version` (str): The version of a model. -* `model_metadata` (Any): Any metadata of a model (descriptions, etc.). This is optional. - -For example: - -```json -{ - "model_id": "vicuna-13b:v1", - "model_name": "vicuna-13b", - "model_version": "v1", - "model_metadata": "learning rate 1e-5, 3 epochs, 13b" -} -``` - -### Prompts - -We store prompts in `prompt.jsonl`. Each row contains a record of a prompt with the following fields: - -* `prompt_id` (int): A unique integer ID for a prompt. Prompts with different IDs are supposed to have different purposes.
-* `system_prompt` (str): The system prompt given to a model. This is the prompt that the model sees first. -* `prompt_template` (str): The prompt body. This is the user prompt that the model sees after the system prompt. It is a Python f-string template, so that we can fill in the inputs later. -* `defaults` (dict): A dictionary of default values for the prompt template. It can be empty. -* `description` (str): A description of the functionality of the prompt. - -For example: - -```json -{ - "prompt_id": 1, - "system_prompt": "You are a helpful assistant.", - "prompt_template": "[Question]\n{question}\n\n[Assistant 1]\n{answer_1}\n\n[End of Assistant 1]\n\n[Assistant 2]\n{answer_2}\n\n[End of Assistant 2]\n\n[System]\n{prompt}\n\n", - "defaults": {"prompt": "Which assistant is more helpful?"}, - "description": "Compare two assistants' answers to a question." -} -``` - -### Reviewers - -`reviewer.jsonl` contains reviewer information we used for reviewing answers generated by different models. Each row contains a record of a reviewer with the following fields: - -* `reviewer_id` (str): A unique ID for a reviewer. Reviewers with different IDs are supposed to have different reviewing performance. -* `prompt_id` (str): The ID of the prompt given to the reviewer (e.g., an AI assistant). Different prompts could result in different reviewing performance. -* `metadata` (dict): Metadata of a reviewer about its configurations. -* `description` (str): A description of the reviewer. -* `category` (str): The category that the reviewer belongs to. - -For example: - -```json -{ - "reviewer_id": "gpt-4-0328-default", - "prompt_id": 1, - "temperature": 0.2, - "max_tokens": 8192, - "description": "GPT-4 for general questions.", - "category": "general" -} -``` - -### Questions - -`question.jsonl` contains questions we used for evaluation. Each row contains a record of a question with the following fields: - -* `question_id` (int): A unique integer for a question. Questions with different IDs are supposed to be different. -* `text` (str): The question text. -* `category` (str): The category of the question. Questions with the same category are supposed to be similar or originate from the same source. - -### Answers - -`answer/xxx.jsonl` contains answers generated by different models. Each row contains a record of an answer with the following fields: - -* `answer_id` (str): A unique UUID for an answer. Answers with different IDs are supposed to be different. -* `question_id` (int): The ID of the question the answer is generated for. -* `model_id` (str): The ID of the model the answer is generated by. -* `text` (str): The answer text. -* `metadata` (dict): Any metadata of the answer. - -Example: - -```json -{ - "answer_id": "[short uuid]", - "question_id": 1, - "model_id": "vicuna-13b:v1", - "text": "Here are five tips...", - "metadata": {} -} -``` - -### Reviews - -`review/xxx.jsonl` contains reviews given by reviewers, comparing performance between a pair of models. Each row contains a record of a review with the following fields: - -* `review_id` (str): A unique UUID for a review. Reviews with different IDs are supposed to be different. -* `question_id` (int): The ID of the question the review is given for. -* `answer1_id` (str): The ID of the first answer. -* `answer2_id` (str): The ID of the second answer. -* `text` (str): The review text. -* `score` (list): A list of scores given by the reviewer. The first score is for the first answer, and the second score is for the second answer.
-* `reviewer_id` (str): The ID of the reviewer. -* `metadata` (dict): Any metadata of the review. - -```json -{ - "review_id": "[short uuid]", - "question_id": 1, - "answer1_id": "[answer1_id]", - "answer2_id": "[answer2_id]", - "text": "Assistant 2 is better...", - "score": [9.0, 7.5], - "reviewer_id": "gpt-4-0328-default", - "metadata": {} -} -``` diff --git a/commune/modules/archive/fastchat/fastchat/eval/eval_gpt_review.py b/commune/modules/archive/fastchat/fastchat/eval/eval_gpt_review.py deleted file mode 100644 index 89352665a..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/eval_gpt_review.py +++ /dev/null @@ -1,142 +0,0 @@ -import argparse -import json -import os -import time - -import openai -import tqdm -import ray - -import shortuuid -import logging -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - -MAX_API_RETRY = 5 -REQ_TIME_GAP = 10 - -@ray.remote(num_cpus=4) -def get_eval(sys_prompt, user_prompt: str, max_tokens: int): - logging.basicConfig(level=logging.INFO) - for i in range(MAX_API_RETRY): - try: - response = openai.ChatCompletion.create( - model='gpt-4', - messages=[{ - 'role': 'system', - 'content': sys_prompt - }, { - 'role': 'user', - 'content': user_prompt, - }], - temperature=0.2, # TODO: figure out which temperature is best for evaluation - max_tokens=max_tokens, - ) - content = response['choices'][0]['message']['content'] - logger.info(content) - return content - except Exception as e: - logger.error(e) - time.sleep(5) - logger.error(f'Failed after {MAX_API_RETRY} retries.') - return 'error' - - -def parse_score(review): - try: - score_pair = review.split('\n')[0] - score_pair = score_pair.replace(',', ' ') - sp = score_pair.split(' ') - if len(sp) == 2: - return [float(sp[0]), float(sp[1])] - else: - raise Exception('Invalid score pair.') - except Exception as e: - logger.error(f'{e}\nContent: {review}\n' - 'You must manually fix the score pair.') - return [-1, -1] - - -def gen_prompt(reviewer_jsons, prompt_jsons, cat, ques, ans1, ans2): - # Default to general category (index=0) - reviewer_idx = 0 - for idx, reviewer in enumerate(reviewer_jsons): - if reviewer['category'] == cat: - reviewer_idx = idx - break - prompt_id = reviewer_jsons[reviewer_idx]['prompt_id'] - prompt_json = prompt_jsons[prompt_id-1] - assert prompt_json['prompt_id'] == prompt_id - - sys_prompt = prompt_json['system_prompt'] - prompt_template = prompt_json['prompt_template'] - defaults = prompt_json['defaults'] - prompt = prompt_template.format(question=ques, answer_1=ans1, answer_2=ans2, **defaults) - - return sys_prompt, prompt, reviewer_idx+1 - - -def get_json_list(file_path): - file_path = os.path.expanduser(file_path) - with open(file_path, 'r') as f: - json_list = [] - for line in f: - json_list.append(json.loads(line)) - return json_list - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='ChatGPT-based QA evaluation.') - parser.add_argument('-q', '--question-file') - parser.add_argument('-a', '--answer-file-list', nargs='+', default=[]) - parser.add_argument('-p', '--prompt-file') - parser.add_argument('-r', '--reviewer-file') - parser.add_argument('-o', '--output-review-file') - parser.add_argument('--max-tokens', type=int, default=1024, help='maximum number of tokens produced in the output') - args = parser.parse_args() - - ray.init() - - question_jsons = get_json_list(args.question_file) - answer1_jsons = get_json_list(args.answer_file_list[0]) - answer2_jsons = get_json_list(args.answer_file_list[1]) - 
reviewer_jsons = get_json_list(args.reviewer_file) - prompt_jsons = get_json_list(args.prompt_file) - - # check if # of questions, answers are the same - assert len(question_jsons) == len(answer1_jsons) == len(answer2_jsons) - - handles = [] - review_jsons = [] - total_len = len(question_jsons) - question_idx_list = list(range(total_len)) - - for i in question_idx_list: - assert answer1_jsons[i]['question_id'] == question_jsons[i]['question_id'] == answer2_jsons[i]['question_id'] - - ques = question_jsons[i]['text'] - cat = question_jsons[i]['category'] - ans1 = answer1_jsons[i]['text'] - ans2 = answer2_jsons[i]['text'] - sys_prompt, prompt, reviewer_id = gen_prompt(reviewer_jsons, prompt_jsons, cat, ques, ans1, ans2) - review_id = shortuuid.uuid() - review_jsons.append({ - 'review_id': review_id, - 'question_id': question_jsons[i]['question_id'], - 'answer1_id': answer1_jsons[i]['answer_id'], - 'answer2_id': answer2_jsons[i]['answer_id'], - 'reviewer_id': reviewer_id, - 'metadata': {}, - }) - # To avoid the rate limit set by OpenAI - handles.append(get_eval.remote(sys_prompt, prompt, args.max_tokens)) - logger.info(f'Waiting for {REQ_TIME_GAP} seconds before sending the next request.') - time.sleep(REQ_TIME_GAP) - - reviews = ray.get(handles) - with open(f'{args.output_review_file}', 'w') as output_review_file: - for idx, review in enumerate(reviews): - scores = parse_score(review) - review_jsons[idx]['text'] = review - review_jsons[idx]['score'] = scores - output_review_file.write(json.dumps(review_jsons[idx]) + '\n') diff --git a/commune/modules/archive/fastchat/fastchat/eval/generate_webpage_data_from_table.py b/commune/modules/archive/fastchat/fastchat/eval/generate_webpage_data_from_table.py deleted file mode 100644 index a13b59f0d..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/generate_webpage_data_from_table.py +++ /dev/null @@ -1,105 +0,0 @@ -"""Generate json file for webpage.""" -import json -import os -import re - -models = ['alpaca', 'llama', 'gpt35', 'bard'] - - -def read_jsonl(path: str, key: str=None): - data = [] - with open(os.path.expanduser(path)) as f: - for line in f: - if not line: - continue - data.append(json.loads(line)) - if key is not None: - data.sort(key=lambda x: x[key]) - data = {item[key]: item for item in data} - return data - - -def trim_hanging_lines(s: str, n: int) -> str: - s = s.strip() - for _ in range(n): - s = s.split('\n', 1)[1].strip() - return s - - -if __name__ == '__main__': - questions = read_jsonl('table/question.jsonl', key='question_id') - - alpaca_answers = read_jsonl('table/answer/answer_alpaca-13b.jsonl', key='question_id') - bard_answers = read_jsonl('table/answer/answer_bard.jsonl', key='question_id') - gpt35_answers = read_jsonl('table/answer/answer_gpt35.jsonl', key='question_id') - llama_answers = read_jsonl('table/answer/answer_llama-13b.jsonl', key='question_id') - vicuna_answers = read_jsonl('table/answer/answer_vicuna-13b.jsonl', key='question_id') - - review_alpaca = read_jsonl('table/review/review_alpaca-13b_vicuna-13b.jsonl', key='question_id') - review_bard = read_jsonl('table/review/review_bard_vicuna-13b.jsonl', key='question_id') - review_gpt35 = read_jsonl('table/review/review_gpt35_vicuna-13b.jsonl', key='question_id') - review_llama = read_jsonl('table/review/review_llama-13b_vicuna-13b.jsonl', key='question_id') - - records = [] - for qid in questions.keys(): - r = { - 'id': qid, - 'category': questions[qid]['category'], - 'question': questions[qid]['text'], - 'answers': { - 'alpaca': 
alpaca_answers[qid]['text'], - 'llama': llama_answers[qid]['text'], - 'bard': bard_answers[qid]['text'], - 'gpt35': gpt35_answers[qid]['text'], - 'vicuna': vicuna_answers[qid]['text'], - }, - 'evaluations': { - 'alpaca': review_alpaca[qid]['text'], - 'llama': review_llama[qid]['text'], - 'bard': review_bard[qid]['text'], - 'gpt35': review_gpt35[qid]['text'], - }, - 'scores': { - 'alpaca': review_alpaca[qid]['score'], - 'llama': review_llama[qid]['score'], - 'bard': review_bard[qid]['score'], - 'gpt35': review_gpt35[qid]['score'], - }, - } - - # cleanup data - cleaned_evals = {} - for k, v in r['evaluations'].items(): - v = v.strip() - lines = v.split('\n') - # trim the first line if it's a pair of numbers - if re.match(r'\d+[, ]+\d+', lines[0]): - lines = lines[1:] - v = '\n'.join(lines) - cleaned_evals[k] = v.replace('Assistant 1', "**Assistant 1**").replace('Assistant 2', '**Assistant 2**') - - r['evaluations'] = cleaned_evals - records.append(r) - - # Reorder the records, this is optional - for r in records: - if r['id'] <= 20: - r['id'] += 60 - else: - r['id'] -= 20 - for r in records: - if r['id'] <= 50: - r['id'] += 10 - elif 50 < r['id'] <= 60: - r['id'] -= 50 - for r in records: - if r['id'] == 7: - r['id'] = 1 - elif r['id'] < 7: - r['id'] += 1 - - records.sort(key=lambda x: x['id']) - - # Write to file - with open('webpage/data.json', 'w') as f: - json.dump({'questions': records, 'models': models}, f, indent=2) diff --git a/commune/modules/archive/fastchat/fastchat/eval/get_model_answer.py b/commune/modules/archive/fastchat/fastchat/eval/get_model_answer.py deleted file mode 100644 index 3718cba1b..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/get_model_answer.py +++ /dev/null @@ -1,82 +0,0 @@ -import argparse -from transformers import AutoTokenizer, AutoModelForCausalLM -import torch -import os -import json -from tqdm import tqdm -import shortuuid -import ray - -from fastchat.conversation import get_default_conv_template -from fastchat.utils import disable_torch_init - - -def run_eval(model_path, model_id, question_file, answer_file, num_gpus): - # split question file into num_gpus files - ques_jsons = [] - with open(os.path.expanduser(question_file), "r") as ques_file: - for line in ques_file: - ques_jsons.append(line) - - chunk_size = len(ques_jsons) // num_gpus - ans_handles = [] - for i in range(0, len(ques_jsons), chunk_size): - ans_handles.append(get_model_answers.remote(model_path, model_id, ques_jsons[i:i + chunk_size])) - - ans_jsons = [] - for ans_handle in ans_handles: - ans_jsons.extend(ray.get(ans_handle)) - - with open(os.path.expanduser(answer_file), "w") as ans_file: - for line in ans_jsons: - ans_file.write(json.dumps(line) + "\n") - - -@ray.remote(num_gpus=1) -@torch.inference_mode() -def get_model_answers(model_path, model_id, question_jsons): - disable_torch_init() - model_path = os.path.expanduser(model_path) - tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False) - model = AutoModelForCausalLM.from_pretrained(model_path, - torch_dtype=torch.float16).cuda() - - ans_jsons = [] - for i, line in enumerate(tqdm(question_jsons)): - ques_json = json.loads(line) - idx = ques_json["question_id"] - qs = ques_json["text"] - conv = get_default_conv_template(model_id).copy() - conv.append_message(conv.roles[0], qs) - conv.append_message(conv.roles[1], None) - prompt = conv.get_prompt() - inputs = tokenizer([prompt]) - output_ids = model.generate( - torch.as_tensor(inputs.input_ids).cuda(), - do_sample=True, - temperature=0.7, - 
max_new_tokens=1024) - outputs = tokenizer.batch_decode(output_ids, skip_special_tokens=True)[0] - skip_echo_len = len(prompt.replace("", " ")) + 1 - - outputs = outputs[skip_echo_len:].strip() - ans_id = shortuuid.uuid() - ans_jsons.append({"question_id": idx, - "text": outputs, - "answer_id": ans_id, - "model_id": model_id, - "metadata": {}}) - return ans_jsons - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument("--model-path", type=str, required=True) - parser.add_argument("--model-id", type=str, required=True) - parser.add_argument("--question-file", type=str, required=True) - parser.add_argument("--answer-file", type=str, default="answer.jsonl") - parser.add_argument("--num-gpus", type=int, default=1) - args = parser.parse_args() - - ray.init() - run_eval(args.model_path, args.model_id, args.question_file, args.answer_file, args.num_gpus) diff --git a/commune/modules/archive/fastchat/fastchat/eval/qa_baseline_gpt35.py b/commune/modules/archive/fastchat/fastchat/eval/qa_baseline_gpt35.py deleted file mode 100644 index babab6e12..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/qa_baseline_gpt35.py +++ /dev/null @@ -1,74 +0,0 @@ -"""Generate answers with GPT-3.5""" -# Note: you need to be using OpenAI Python v0.27.0 for the code below to work -import argparse -import json -import os -import time -import concurrent.futures - -import openai -import tqdm -import shortuuid - -MODEL = 'gpt-3.5-turbo' -MODEL_ID = 'gpt-3.5-turbo:20230327' - -def get_answer(question_id: int, question: str, max_tokens: int): - ans = { - 'answer_id': shortuuid.uuid(), - 'question_id': question_id, - 'model_id': MODEL_ID, - } - for _ in range(3): - try: - response = openai.ChatCompletion.create( - model=MODEL, - messages=[{ - 'role': 'system', - 'content': 'You are a helpful assistant.' 
- }, { - 'role': 'user', - 'content': question, - }], - max_tokens=max_tokens, - ) - ans['text'] = response['choices'][0]['message']['content'] - return ans - except Exception as e: - print('[ERROR]', e) - ans['text'] = '#ERROR#' - time.sleep(1) - return ans - - -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='ChatGPT answer generation.') - parser.add_argument('-q', '--question') - parser.add_argument('-o', '--output') - parser.add_argument('--max-tokens', type=int, default=1024, help='maximum number of tokens produced in the output') - args = parser.parse_args() - - questions_dict = {} - with open(os.path.expanduser(args.question)) as f: - for line in f: - if not line: - continue - q = json.loads(line) - questions_dict[q['question_id']] = q['text'] - - answers = [] - - with concurrent.futures.ThreadPoolExecutor(max_workers=32) as executor: - futures = [] - for qid, question in questions_dict.items(): - future = executor.submit(get_answer, qid, question, args.max_tokens) - futures.append(future) - - for future in tqdm.tqdm(concurrent.futures.as_completed(futures), total=len(futures)): - answers.append(future.result()) - - answers.sort(key=lambda x: x['question_id']) - - with open(os.path.expanduser(args.output), 'w') as f: - table = [json.dumps(ans) for ans in answers] - f.write('\n'.join(table)) diff --git a/commune/modules/archive/fastchat/fastchat/eval/requirements.txt b/commune/modules/archive/fastchat/fastchat/eval/requirements.txt deleted file mode 100644 index c2490e15e..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -shortuuid -ray \ No newline at end of file diff --git a/commune/modules/archive/fastchat/fastchat/eval/script/run_model_qa.yaml b/commune/modules/archive/fastchat/fastchat/eval/script/run_model_qa.yaml deleted file mode 100644 index 64e36560e..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/script/run_model_qa.yaml +++ /dev/null @@ -1,48 +0,0 @@ -resources: - accelerators: A100:4 - cloud: gcp - -num_nodes: 1 - -workdir: . - -setup: | - conda activate chatbot - if [ $? -eq 0 ]; then - echo 'conda env exists' - else - # Setup the environment - conda create -n chatbot python=3.10 -y - fi - conda activate chatbot - - pip3 install -e . - - # Install pytorch - pip install torch==1.13.1+cu116 --extra-index-url https://download.pytorch.org/whl/cu116 - - # Install huggingface with the LLaMA commit - pip install git+https://github.com/huggingface/transformers.git@c612628045822f909020f7eb6784c79700813eda - - cd fastchat/eval - pip install -r requirements.txt - - MODEL_NAME=vicuna-7b-20230322-fp16 - MODEL_PATH=~/${MODEL_NAME} - - if [ ! 
-f "$MODEL_PATH/ready" ]; then - echo "export MODEL_PATH=${MODEL_PATH}" >> ~/.bashrc - echo "export MODEL_NAME=${MODEL_NAME}" >> ~/.bashrc - mkdir -p $MODEL_PATH - gsutil -m cp gs://model-weights/${MODEL_NAME}/* $MODEL_PATH - touch $MODEL_PATH/ready - echo "model downloaded" - fi - -run: | - conda activate chatbot - python -m fastchat.eval.get_model_answer --model-path $MODEL_PATH \ - --model-id $MODEL_NAME \ - --question-file fastchat/eval/table/question.jsonl \ - --answer-file answer.jsonl \ - --num-gpus $SKYPILOT_NUM_GPUS_PER_NODE diff --git a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_alpaca-13b.jsonl b/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_alpaca-13b.jsonl deleted file mode 100644 index b850e137e..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_alpaca-13b.jsonl +++ /dev/null @@ -1,80 +0,0 @@ -{"question_id": 1, "text": "Improving time management skills involves setting priorities, breaking tasks into smaller chunks, delegating tasks, avoiding multitasking, and taking regular breaks. Additionally, it is important to have a positive attitude and be realistic with goals. Making a to-do list, tracking time, and using technology to automate mundane tasks can also help improve time management skills.", "answer_id": "kEL9ifUHDeYuAXzevje2se", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 2, "text": "The most effective ways to deal with stress include identifying and avoiding the sources of stress, developing healthy coping mechanisms, and engaging in relaxation techniques such as meditation, deep breathing, and physical exercise. Additionally, it is important to take care of one's mental and physical health, get enough rest and nutrition, and practice self-care.", "answer_id": "VcF3NrWGXhhxLkDVurNrwq", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 3, "text": "The main differences between Python and JavaScript programming languages are in their syntax, structure, and the type of applications they are designed for. Python is a general-purpose language designed to be easy to read and write, while JavaScript is primarily used for web development. Python is a compiled language, while JavaScript is an interpreted language. Python is strongly typed, while JavaScript is not. Python is typically faster than JavaScript in terms of execution, while JavaScript is better suited to create interactive web pages.", "answer_id": "LpvtyQi9QdSgRrgGDxiGrT", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 4, "text": "Working from home can be a great way to increase productivity, but it does require some strategic planning and tactics. Here are some tips for increasing your productivity while working from home: \n1. Establish a routine: Having a regular schedule and sticking to it can help you stay focused and motivated.\n2. Create a distraction-free zone: Eliminate distractions by turning off notifications and setting a \u201cdo not disturb\u201d mode.\n3. Take breaks: Working for long stretches without taking breaks can lead to burnout and decreased productivity.\n4. Exercise: Exercise can help improve focus and productivity.\n5. Get organized: Having a clean and organized workspace can help you stay on task.\n6. 
Set goals: Set short-term and long-term goals to keep you motivated and on track.\n7. Connect with colleagues: Don\u2019t be afraid to reach out to colleagues for help or advice.\n8. Track your progress: Keep track of your progress and celebrate your successes.\n9. Unplug: Taking breaks and unplugging from work can help you recharge and come back to work more refreshed.", "answer_id": "7zQm8cSTJhPtPdZdxbcfrX", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 5, "text": "Sure. Quantum computing is a new technology that uses quantum mechanical phenomena to perform operations on data. Unlike traditional computers, which process information in binary form, quantum computers can process information in multiple states at the same time, allowing them to perform operations faster and with more accuracy. It also allows them to solve problems that classical computers struggle with. Quantum computing is still in its infancy, but it has the potential to revolutionize the computing industry and open up new possibilities in artificial intelligence, cryptography, and other fields.", "answer_id": "UrLEH82RHwqqLt2LyvYSKj", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 6, "text": "Plant-based protein sources include legumes, grains, nuts, and seeds, and are generally lower in fat and calories than animal-based proteins. Animal-based proteins come from sources such as meat, eggs, and dairy, and are generally higher in fat and calories than plant-based proteins. Plant-based proteins are also generally lower in cholesterol and saturated fat than animal-based proteins. Additionally, plant-based proteins require fewer resources for production, such as water and land, than animal-based proteins.", "answer_id": "fpRdMTdnfirosQixuf2Gez", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 7, "text": "Developing critical thinking skills involves actively questioning assumptions, taking multiple perspectives, analyzing information, and coming to logical conclusions. You can practice these skills by posing questions to yourself and working through problems, as well as engaging in thoughtful discussion with others.", "answer_id": "PvGmsCJSNFcvQKmPTnnd7s", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 8, "text": "The major challenges faced by the education sector include lack of access to quality education, low educational outcomes, high costs of education, lack of adequate infrastructure and teaching materials, overcrowding in classrooms, and gender inequality. Other challenges include lack of access to technology and digital learning materials, outdated curriculum and pedagogical practices, and inadequate teacher training.", "answer_id": "n4ANAbpR3gvLPP8poPfKZ6", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 9, "text": "The primary factors that influence consumer behavior include cultural, social, and personal factors. Cultural factors include shared beliefs, values, and customs within a society, while social factors include the influence of family, friends, and peers. 
Personal factors include an individual's lifestyle, attitudes, beliefs, and past experiences. Other factors such as advertising, marketing, and price also have an effect on consumer behavior.", "answer_id": "STJ36GrgQMcaUi7zaoNPit", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 10, "text": "The most effective strategies for conflict resolution in the workplace include:\n1. Establishing open communication: Encouraging open and honest communication between parties is key to resolving any conflict.\n2. Listening actively: Active listening allows both parties to feel heard and allows for a better understanding of the other person's perspective.\n3. Identifying root causes: Understanding the underlying cause of the conflict can help to find more effective solutions that address the core issue.\n4. Focusing on interests, not positions: Focusing on the interests of both parties can help to find creative solutions that satisfy the needs of everyone involved.\n5. Developing mutual respect: Respect is essential to any successful resolution. Building mutual respect can help to create a healthier work environment.\n6. Negotiation: Working together to negotiate a mutually beneficial solution can help to create an agreement that both parties are happy with.\n7. Taking responsibility: Taking responsibility for one\u2019s words and actions can help to create a culture of accountability in the workplace.", "answer_id": "425SwYvqKPAXFGTYKXB7Cs", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 11, "text": "The use of single-use plastic bottles has a number of negative implications on the environment. They are not biodegradable and can take hundreds of years to break down. This means they are around for a long time, releasing potentially harmful chemicals into the environment, and they can be easily mistaken for food by animals. Single-use plastic bottles can also be a source of micro-plastics, which are very small particles of plastic that are difficult to clean up and can be consumed by fish and other aquatic life, leading to health problems in humans who consume those fish. On the other hand, reusable bottles are a much more environmentally friendly choice since they do not produce any waste. They are also often made of materials that are safer for human health, such as stainless steel or glass. Reusable bottles are also more cost-effective since they do not need to be replaced as often as single-use plastic bottles.", "answer_id": "VbNAuj6KAkMdLJQXMo22oK", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 12, "text": "When designing an inclusive and accessible public transportation system, some factors to consider include providing frequent and reliable service, accessible stops and vehicles, a variety of fare options, clear and consistent signage and information, comfortable seating and space for wheelchairs and other assistive devices, and security and safety protocols. 
Additionally, public transportation systems should be integrated with other services such as public parking, bike and scooter sharing, and shuttles to provide a comprehensive and accessible experience.", "answer_id": "CNGqAeu2QJbQ4QGzHJDPdq", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 13, "text": "Governments can use fiscal and monetary policies to combat economic recessions. Fiscal policies involve the government spending money or cutting taxes in order to stimulate the economy, while monetary policies involve changing the money supply or interest rates to influence economic activity. These policies can be used to stabilize the economy, create jobs, and reduce poverty.", "answer_id": "E8w2qYqnm8iqCrSkUv62sz", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 14, "text": "Language and cultural barriers can lead to misunderstandings and make it more difficult to communicate and form relationships in multicultural societies. Without a common language or shared culture, people can have difficulty understanding each other's perspectives and beliefs, which can lead to mistrust and a lack of understanding between different ethnic groups. To overcome these barriers, it is important to make an effort to learn about different cultures, be open-minded, and take the time to understand each other.", "answer_id": "8o5yMymfzo6kzmp9GK5MWr", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 15, "text": "Artificial intelligence can be used to improve the quality and efficiency of healthcare delivery in a variety of ways. AI can be used to assist with diagnosing diseases by comparing symptoms and medical history to images of known diseases and medical conditions. AI can also be used to analyze laboratory results and patient records to identify potential problems and develop treatment plans. AI can be used to automate administrative tasks and reduce paperwork, as well as identify potential drug interactions and side effects. AI can also be used to automate appointment reminders, facilitate communication between doctors and patients, and even provide virtual health coaching to help patients manage their conditions.", "answer_id": "kbJVEEsdsSScEq5Y5furr7", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 16, "text": "CRISPR-Cas9 is a recently developed gene editing technology that has revolutionized the way scientists are able to edit genomes. The technology uses a guide RNA to direct the Cas9 enzyme to a specific location in the genome, where it will cut the DNA strands. This allows for the insertion or deletion of DNA sequences, which can be used to modify the genetic code of an organism. Potential applications include treating genetic diseases, increasing crop yields, and creating pest-resistant crops. 
Ethically, the biggest concern is the potential misuse of the technology, which could lead to unintended consequences or be used to alter humanity in ways that could harm us.", "answer_id": "CMUL5ULZuR7YC5EPzCBN2N", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 17, "text": "Vaccinations work by stimulating the body's immune system to protect against infectious diseases. Herd immunity is a concept whereby a population is protected against a certain disease when a certain percentage of the population has immunity to the disease, either through vaccination or having already contracted the disease. This is because when enough people are vaccinated, it reduces the spread of the disease and prevents it from spreading to those who are not immune.", "answer_id": "kEmDDQyNqSkyFihYEEBpuR", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 18, "text": "Social media platforms can have a significant influence on how people consume and share news. By providing instant access to news sources and allowing users to easily share content, these platforms can shape the public's information landscape and the spread of misinformation. Some potential implications of this are that users may not always be able to discern between reliable and unreliable sources, and misleading information may be more easily spread and go viral. This can lead to confusion and mistrust among users, and can potentially be exploited for nefarious purposes. To mitigate these risks, it is important for users to be aware of the potential biases and agendas of the sources they are consuming, and for social media platforms to take proactive steps to curb the spread of misinformation.", "answer_id": "Qs3grQsqFVGK9EVkCkf9PB", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 19, "text": "Cultural, social, and economic factors all play a role in determining an individual's food choices. For example, people growing up in households with limited economic means may be more likely to consume cheaper, less nutritious foods due to economic constraints. Additionally, cultural norms and values can influence food choices, such as the emphasis on large portions typical in some American cultural settings. Finally, social factors such as peer pressure and advertising can also sway an individual's food choices. By understanding and taking into account these various factors, health promotion strategies can be tailored and targeted to help individuals make healthier dietary decisions.", "answer_id": "kzZ6dKN7hkRWjqtdHr7Qns", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 20, "text": "Natural selection is the process by which certain traits or characteristics become more or less common in a population over time. This is due to the fact that those individuals with certain advantageous traits are more likely to survive, reproduce, and pass those traits on to their offspring. 
Over time, this leads to the evolution and adaptation of species to their environment.", "answer_id": "DPPDG6YGFJij2GCmRL66PU", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 21, "text": "I am a brave knight, loyal to my king and country, and I have come to serve and protect them both. I am skilled in the arts of war and peace, and I am proud to be here at the royal court.", "answer_id": "D62FjDb4nZANzPpfSfsiyn", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 22, "text": "Avast ye maties! To the bottom of the sea we go! In search of hidden riches and fortune, we shall brave the depths and face any danger that comes our way. For glory and honor, let us set sail and find the lost Ark!", "answer_id": "k7E4NNw5kyj9DmvP5Pu2zb", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 23, "text": "If I were a Shakespearean character, I would declare my love for someone in a soliloquy with passionate words and metaphors. I would express my love for them with descriptions of their beauty and the joy I feel when I am with them. I would confess my hopes and dreams for our future together and declare my devotion to them. I would make my love for them known in a way that could never be forgotten.", "answer_id": "KFocjVCejYrU3YmLjAqoUF", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 24, "text": "As a superhero, my origin story is very special. I was born with special abilities that I had to keep hidden for a long time. I was scared to use them at first, but eventually I discovered that I could use my powers to protect people and make the world a better place. I've had to face a lot of challenges along the way, but I've also had a lot of help from friends and family. Now, I use my abilities to help people in need and fight for justice.", "answer_id": "dq8Sm9djS7e7y9sG9vmMJf", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 25, "text": "If I were a time traveler from the year 3000, I would tell people about the incredible advancements in technology, such as the ability to travel through time, space, and dimensions; the development of intelligent robots and autonomous vehicles; the emergence of virtual reality and augmented reality; and the rise of artificial intelligence and machine learning.", "answer_id": "XZ8fG8e6u7CyKd2moK6abe", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 26, "text": "The game was a nail-biter, with both teams trading leads throughout the game. With only seconds left on the clock, the home team made a bold move and passed the ball to their star player, who took the ball down the court and made a layup at the buzzer to seal the victory for the home team!", "answer_id": "oKaXHfoK4pXwrefFWXmeA8", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 27, "text": "My signature dish is a seamless blend of traditional and modern cooking techniques. 
I use only the freshest ingredients to create a unique and unforgettable dining experience. The dish is a perfect balance of flavors and textures, with a subtle hint of my personal style. It is a dish that I am proud to call my own.", "answer_id": "ZwiZfvDWm7SETKNBfDk7Mb", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 28, "text": "At the summit of Mount Everest, you are filled with a sense of accomplishment and joy. The view from the top is absolutely breathtaking - you can see for miles and miles, with the majestic Himalayan mountain range stretching out in all directions. It is a truly unforgettable experience.", "answer_id": "DxYopRe2LcTJMy3FWu6btd", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 29, "text": "As a colonist on Mars, my daily life is filled with challenges. Finding resources and creating a sustainable environment is a priority. I face a number of challenges including extreme temperature fluctuations, limited access to resources, and the difficulty of travelling to and from the planet. Additionally, I must be mindful of my physical and mental health since I am so far from home. Despite these challenges, I am grateful to be able to explore and experience this new world.", "answer_id": "WC3UJVh4jQ5RUkpcRMU98L", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 30, "text": "In the post-apocalyptic world, I am a survivor by necessity. I scavenge for food and supplies, and I'm always on the lookout for potential allies. I've encountered a few people who have managed to survive, and together we have formed an alliance to help each other. We hunt for food, build shelter, and work together to stay alive. We also share knowledge and skills, like how to start a fire or how to use a weapon. We look out for each other, and our alliance has strengthened our chances of survival.", "answer_id": "gTvgn6ksDjGGgdprw6AG5A", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 31, "text": "There are a few ways to tell if a restaurant is popular among locals or mainly attracts tourists. Firstly, look at the clientele - if the majority of people there are tourists, it's likely that the restaurant is mainly attracting tourists. Secondly, check online reviews - if the reviews are mainly from tourists, then it's likely that the restaurant is popular with tourists. Finally, look at the prices - if the prices are higher than average for the area, it could be a sign that the restaurant is popular with locals. 
This information can be useful to get an idea of what kind of experience to expect, as locals might know about different aspects of the restaurant that tourists wouldn't.", "answer_id": "3q7giCk2BA3Ye4Tm9HC2iw", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 32, "text": "Some subtle clues that suggest someone is pretending to understand a topic or conversation when they are actually confused or uninformed include: not asking any questions or engaging in the conversation, avoiding eye contact, fidgeting or stammering when speaking, repeating questions or comments made by other people, and nodding along without any signs of understanding.", "answer_id": "hRGsxy86v26SC4yAQS29X4", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 33, "text": "Some people prefer the tactile and visual experience of using a paper map, and others may prefer to ask for directions from locals in order to get a more personalized experience. Additionally, GPS devices and smartphone apps can sometimes be inaccurate or have limited battery life, while a paper map or asking for directions may be more reliable.", "answer_id": "3n49A5ggJERfXYrLns3ZeU", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 34, "text": "One way to tell if someone is genuinely interested in a conversation is to observe their body language and facial expressions. Are they making an effort to maintain eye contact? Are they leaning in and actively listening to what you are saying? Do they ask questions and provide relevant answers? If so, it is likely that they are genuinely interested in the conversation. Additionally, if someone is simply being polite, they may not ask questions or engage in the conversation as much, and may not make an effort to maintain eye contact.", "answer_id": "ErCpFtPuYVru4oTTk4WrxG", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 35, "text": "Shopping at a small, locally-owned business can benefit the local community by keeping money in the area and supporting local jobs. Additionally, these businesses tend to offer a more personal experience and higher quality products than large chain stores. Furthermore, shopping at small businesses can help create a sense of place and community, and can help maintain a unique local culture.", "answer_id": "PTNoCRMZWoJk8HaKX7fW45", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 36, "text": "There are several ways to assess the credibility of a source of information. Firstly, you can look at the author's credentials and experience in the relevant field. Secondly, you can check the source of the information, such as whether it is from a reliable website or publication. Thirdly, you can look at the evidence presented in the article and whether it is backed up by reliable sources. 
Finally, you can read other people's reviews or comments about the article to get a better idea of its credibility.", "answer_id": "n8cFs9KENNwZ4z3SR4iXTr", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 37, "text": "Some people enjoy the sensation of being scared because it can create a feeling of excitement, enhance their emotional state, and provide a sense of thrill and adventure. Others may avoid these experiences because they are afraid of the unknown, or because they don't enjoy the feeling of being scared. Everyone is different, and some people may be more attracted to thrilling and exciting activities while others may prefer calmer activities.", "answer_id": "GzxL9mmEK5RzKqRbqBMUVC", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 38, "text": "By observing the behavior of others in a social situation, one can gain clues as to the cultural norms and expectations of a group. For example, watching how people interact with one another, how they address each other, how they handle disagreements, and how they go about solving problems can provide insight into the cultural values of the group. Additionally, observing body language, facial expressions, and other nonverbal cues can offer clues as to the accepted norms of behavior in a particular culture.", "answer_id": "QpoHFgb9SzwuaXQQUuBUQD", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 39, "text": "It is an interesting question, and one that has been debated for quite some time. I think there are valid arguments on both sides. On the one hand, exploring space is a remarkable human endeavor and could lead to tremendous scientific discoveries and technological advances. On the other hand, there are many pressing issues that need to be addressed on Earth, such as poverty, inequality, and climate change. Each side would argue that their cause is more important, and it is ultimately up to each individual to decide which one they feel more strongly about.", "answer_id": "Fxe6MS4GpP3LMDUwzY2cPA", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 40, "text": "It is important to strike a balance between job creation and technological progress. Automation can increase efficiency and productivity, but it should not come at the expense of job security and people's livelihoods. Therefore, it is essential to create policies and initiatives that promote both job creation and technological progress. This could include investing in training and education to ensure that people have the skills necessary to compete in the modern job market, as well as incentivizing companies to invest in technologies that create jobs and stimulate economic growth.", "answer_id": "mJiQ2FGR4Xb8kmhZjharkw", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 41, "text": "On average, the human eye blinks about 20 times per minute, or about 14,400 times per day. In a lifetime, this means that the average human will blink roughly 50 million times. This may seem like a lot, but it serves an important purpose. Blinking helps to keep the eyes lubricated and prevents them from drying out. 
It also helps to spread tears over the surface of the eye, washing away foreign particles and keeping the eye clean. Additionally, blinking helps to reduce the risk of eye infections by helping to clear away bacteria and other foreign substances.", "answer_id": "6Kph4RHRKEZ4YUoaHuEhBv", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 42, "text": "A grain of salt contains 102.98 atoms. To calculate this, we first need to know the atomic weight of a single atom. The atomic weight of an atom is the number of protons and neutrons in the nucleus of an atom, which determines its atomic mass. The atomic weight of a single atom of salt is 58.943 g/atom. Therefore, a grain of salt contains 102.98 atoms, which is equivalent to 60.98 grams.", "answer_id": "WBwpBQwhxn5kxLDb7MschC", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 43, "text": "Approximately 2000 lightning strikes occur on Earth each day. This is because the atmospheric conditions must come together in a particular way for a lightning strike to occur. Firstly, a large amount of electric charge must accumulate in the atmosphere, typically in a storm system. Then, the air must become increasingly unstable, leading to rising air and a strong updraft. This causes an electric breakdown of the air, and then an exchange of electricity occurs from the cloud to the ground, forming a lightning bolt. As these conditions are necessary for a lightning strike to occur, about 2000 lightning strikes happen on Earth each day.", "answer_id": "kf8nahQVci2ZLaYikagB7U", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 44, "text": "It would take about 10 million balloons to lift a house like in the movie Up. The balloons would need to be filled with helium in order for the house to be lifted. Each balloon would need to be filled with about 89.1 cubic feet of helium in order to lift 500 pounds. To calculate how many balloons would be needed, simply multiply the weight of the house (264.72 lbs) by the number of cubic feet of helium needed to lift 500 pounds (89.1). Therefore, it would take 10 million balloons to lift a house like in the movie Up.", "answer_id": "Gptgryd4o2dC8V5aqRmeJJ", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 45, "text": "According to a 2017 study, over 6.3 billion text messages are sent globally in a minute. This number is expected to grow to 6.8 billion by 2021. This is due to the increasing number of smartphones, better network coverage, and the increasing popularity of texting for communication. Furthermore, the increase in smartphones and better network coverage has also led to the rise of instant messaging applications such as WhatsApp, which has further increased the amount of text messages sent per minute.", "answer_id": "RfBWW8ZhdfTuTMb454Un4o", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 46, "text": "It is estimated that over 1 billion words are spoken on Earth every day. This is due to the fact that humans are highly social creatures, and language is the primary way that we communicate with each other. 
In addition, language is a key component of all cultures and societies, and is constantly being used to exchange information and ideas. Furthermore, language is integral to the way we think and process information, as well as to how we connect with each other on an emotional level. Therefore, it is not surprising that so many words are spoken on a daily basis.", "answer_id": "neGgLYm47JvqN8qkw8VeoW", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 47, "text": "On average, about 100 snowflakes fall per second during a typical winter. This number can vary drastically depending on the location and severity of the storm. Snowflakes are made up of tiny ice crystals, which form in the atmosphere when the temperature is below freezing and there is moisture in the air. As air moves and mixes, the crystals come together to form snowflakes. The shape, size and number of snowflakes depend on the atmospheric conditions, such as temperature and humidity, when they form.", "answer_id": "KSePbzLwsYasR3aui4HU8h", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 48, "text": "It is estimated that all the books ever written would total about 100 billion pages. This is based on the fact that there are roughly 130 million unique titles in existence, and each book on average contains about 200 pages. This means that if all the books were printed out, the stack of paper would be about 400 miles high. Additionally, if each book was represented by a single page, the total number of pages would be approximately 100 billion. Throughout history, books have served as a way to preserve knowledge and information, as well as to share ideas and stories with others. As technology advances, books are increasingly available in digital formats, which allows for easier access and storage. Regardless of the medium, books will continue to be a valuable source of information for generations to come.", "answer_id": "SWWfLpP2gnWHLbYRZXNsW3", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 49, "text": "Since the beginning of life, the Earth has orbited the Sun a total of 4.3 billion times. This is calculated by multiplying the average length of a year (365.25 days) by the number of years since life began (approximately 4.5 billion years). Life on Earth relies on the Sun's energy reaching the Earth's atmosphere and being redistributed by the atmosphere, the land surface, and the oceans. Without this energy, life on Earth would not be possible. Therefore, the Earth's orbit around the Sun is integral to the sustainability of life on Earth.", "answer_id": "WaBwKYhs7eAG22qCGLH2j3", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 50, "text": "Since the beginning of recorded music, countless songs have been recorded. The exact number is difficult to quantify, but conservative estimates put the number of songs recorded throughout history at several hundred thousand. This includes songs from all genres, from classical music to modern pop. The invention of recording technology has allowed musicians to preserve their music and share it with the world. This has enabled generations to access and enjoy music from all eras. 
With the rise of digital music, the number of recordings available has grown exponentially, and new songs are being recorded all the time.", "answer_id": "MfMJeE9om7qyBbqopHouf4", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 51, "text": "If the Internet had been invented during the Renaissance period, it likely would have been used primarily by the wealthy and powerful to access knowledge and information, as there was no widespread system of public education at the time. It also would have been a much slower and more expensive process to access information, as there were no high-speed connections or digital networks available.", "answer_id": "TjWPRDM6JFpPF8xeRptCKb", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 52, "text": "If the Aztecs had successfully repelled the Spanish conquistadors, their culture and way of life would have likely remained intact. The Spanish would not have been able to colonize Mexico, and their influence on the region would have been much less. It's likely that the Aztecs would have continued to expand their empire and control the areas that are now parts of Central and South America. Additionally, the Native American populations in the area would have likely been able to maintain more of their traditional customs and beliefs, and their languages may not have been as heavily impacted by Spanish.", "answer_id": "iR2tYTsWTFENEP7Qy9RgtX", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 53, "text": "If the Black Death had not occurred in the 14th century, the population of Europe may have continued to grow unabated, leading to more rapid urbanization and economic growth. It's likely that the Renaissance would have begun sooner, and the scientific revolution may have occurred earlier as well. Without the Black Death, there may have been no need for the industrial revolution, or at least it may have occurred later and on a smaller scale. Additionally, the spread of diseases may have been slower, leading to greater population density in certain areas.", "answer_id": "AZdS8xAi3GwAmCqkNSnnwv", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 54, "text": "If Isaac Newton had focused on biology instead of physics, he might have made important discoveries in the field of medicine and biology. He may have studied the human body and its functions in greater detail, and possibly even made important breakthroughs in treating diseases. He may also have studied the behavior of different animals in the wild and made important observations about their habits and habitats.", "answer_id": "VmwifF2JD5osYKDTqv2ZRS", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 55, "text": "If the Beatles had never formed as a band, the music world would have been drastically different. The British Invasion of the 1960s might not have happened, or it could have happened in a much different way. 
The cultural impact of the Beatles, including their influence on other musicians, fashion and society, may never have existed.", "answer_id": "mUL5UPj3qDGaCriEjL2U3B", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 56, "text": "If Alan Turing had not cracked the Enigma code during World War II, the outcome of the war could have been very different. It's possible that the Allies would have eventually defeated the Axis powers, but it likely would have taken much longer and cost many more lives. With the Enigma code cracked, the Allies were able to gain a critical advantage over the Axis powers and ultimately win the war.", "answer_id": "dVdwUoVrAQJDuWxiodykiw", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 57, "text": "If the Suez Canal had never been constructed, it would have major implications for international trade and navigation. The Suez Canal is an important shipping route connecting the Mediterranean Sea to the Red Sea, which in turn connects to the Indian Ocean. This allows for a shorter and more direct route for ships traveling between Europe and Asia, reducing travel time significantly. Without the Suez Canal, ships would have to travel around the Cape of Good Hope at the southern tip of Africa, which is much more time consuming and costly. This would make international trade more difficult and expensive, and could hinder global economic growth.", "answer_id": "EiNn9jjfy7dga6xfCtLtF8", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 58, "text": "If the Maya civilization had not mysteriously collapsed, it is likely that their influence and power would have continued to grow. They would have likely continued to build large cities and monuments, develop their writing and astronomical systems, and expand their trade networks. Additionally, their advances in agriculture and water management may have led to increased food production and a larger population.", "answer_id": "eqG9f2R9hXVyZrZMpcqAYq", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 59, "text": "If Christopher Columbus had not discovered the Americas, the indigenous people of the Americas would have continued to live in relative isolation from the rest of the world. It is likely that European colonization and the consequences of that would have been delayed, leading to a different modern world. It is also possible that other Europeans would have made the voyage across the Atlantic and discovered the Americas instead.", "answer_id": "ex42CLEzDVC2TxAvMCtYQJ", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 60, "text": "If Vincent van Gogh had been successful during his lifetime, he may have been able to achieve the recognition and financial stability that he desired. He may have been able to travel and gain more exposure for his artwork, as well as connect with other like-minded individuals. He may have been able to create more artwork and expand his portfolio. 
He may also have been able to find a mentor or patron who could have helped him further his career.", "answer_id": "RfFANSwHeCm3Pqe2AJ86Dk", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 61, "text": "The program should read a text file line by line. It should then check each line for the specified word, counting up each time the word is found. Finally, it should output the number of occurrences of the word.", "answer_id": "nzxdtWmgQhRkHeVAanVogZ", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 62, "text": "The following Python function can be used to find the longest common subsequence of two input strings using dynamic programming:\ndef longest_common_subsequence(s1, s2):\n\"\"\"This function finds the longest common subsequence of two strings.\n\n Arguments:\n s1 {string} -- The first string\n s2 {string} -- The second string\n\n Returns:\n {int} -- The length of the longest common subsequence\n\n\"\"\"\n n1 = len(s1)\n n2 = len(s2)\n \n # Creating an empty matrix to store the results\n results = [[None]*(n2 + 1) for _ in range(n1 + 1)]\n \n # Initializing the first row to 0\n for i in range(n1 + 1):\n results[i][0] = 0\n \n # Filling the first row with 0\n for i in range(n1):\n results[i][0] = 0\n \n # Calculating the longest common subsequence\n for i in range(n1 + 1):\n for j in range(n2 + 1):\n if (i == 0 or j == 0):\n results[i][j] = 0\n elif (s1[i-1] == s2[j-1] and (results[i-1][j-1] < (i-j) or results[i-1][j-1] == 0)):\n results[i][j] = max(results[i-1][j-1] + 1, i+j)\n else:\n results[i][j] = 0\n \n return results[n1][n2]", "answer_id": "kRo6z5qmeiSM8Xz6BPJHLC", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 63, "text": "The following regular expression can be used to validate an email address in Python:\n^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\.[a-zA-Z0-9-.]+$", "answer_id": "7kj8odEE79UBdR4pT6u6TY", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 64, "text": "Here is a program to find the nth Fibonacci number using dynamic programming:\n1. Set a variable 'n' to the nth Fibonacci number.\n2. Set two other variables 'a' and 'b' to 0 and 1 respectively.\n3. Set a loop from 0 to (n-1).\n4. In the loop, set another loop from 0 to (a+b).\n5. In the inner loop, if 'a' is even and 'b' is odd, set 'a' to a+1 and 'b' to b+1.\n6. If 'a' is odd and 'b' is even, set 'a' to a-1 and 'b' to b-1.\n7. After the inner loop, set 'n' to 'a' and break out of the outer loop.\n8. Print the value of 'n'.\nThis program uses the idea of dynamic programming to compute the nth Fibonacci number. The program first sets two auxiliary variables 'a' and 'b' to 0 and 1 respectively. Then it sets a loop from 0 to (n-1). In the loop, it sets another loop from 0 to (a+b). In the inner loop, if 'a' is even and 'b' is odd, it sets 'a' to a+1 and 'b' to b+1. If 'a' is odd and 'b' is even, it sets 'a' to a-1 and 'b' to b-1. After the inner loop, it sets 'n' to 'a' and breaks out of the outer loop. 
Finally, it prints the value of 'n'.", "answer_id": "UULZ4GLz9j9ot7tN7x3ySD", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 65, "text": "A binary search algorithm can be used to find a specific element in a sorted array by repeatedly splitting the array in half and examining the middle element to determine whether the target element is greater than, less than, or equal to it. If the target element is found, the algorithm immediately returns the index of the element. If it isn\u2019t, the array is split in half, with one half discarded based on the comparison. The process is repeated until the element is found or the array is completely searched.", "answer_id": "Rh8P3bxSJx4p6nCZkSZBrh", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 66, "text": "Implementing a queue using two stacks in Python is relatively simple. First, you need to create two stacks, which will act as the queue. Next, you need to define an enqueue and dequeue method, which will add and remove items from the queue. Finally, you need to create an instance of the queue class and start using it.", "answer_id": "R3aYUUtY46ht4ToU8US96x", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 67, "text": "The program can be implemented using two nested for loops. The inner loop can be used to iterate through the elements of the first array, while the outer loop can be used to iterate through the elements of the second array. If the two elements being compared are the same, the program should mark those elements with a \"found\" flag and move on. Once the inner loop completes, the program should check if the \"found\" flag is set for all elements in the second array. If it is, then the common elements have been found.", "answer_id": "MQJFbH3KoFaSMmPgmK9LPa", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 68, "text": "The value of f(2) is 1.", "answer_id": "dBWqryuzG6BHRdPgrWWpbH", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 69, "text": "x = 6.", "answer_id": "W74byrCbE7XiJWjcPTZS7x", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 70, "text": "The length of the line segment is 8 units.", "answer_id": "7WC5quGKdv8hADCxWaWZ57", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 71, "text": "Yes, I can help you write a formal email to a potential business partner proposing a joint venture. Here's a template that you can use:\nSubject Line: Proposing a Joint Venture\nDear [Name],\nI am writing to propose a joint venture between our two companies. I believe that by combining our resources, we can create a powerful and profitable partnership. Our combined experience and knowledge in the industry will help us to create something unique and special.\nI am confident that our partnership will bring mutual benefits, such as increased profits and access to new markets. 
Please let me know if you are interested, and we can discuss the details.\nThank you for your time and consideration. I look forward to hearing from you soon.\nSincerely,\n[Your Name]", "answer_id": "AZER7D3RKZ9F9SXHiMjdqM", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 72, "text": "Yes, I can help you write a resignation letter to your current employer. The letter should be written in a polite and professional tone and should express your appreciation for the opportunities and experiences you have gained during your employment. It should also include a clear statement of your intention to resign and the date of your last day. Here is a template you can use:\nDear [Employer],\nI am writing to inform you of my intention to resign from my position as [Position], effective [date]. I have appreciated my time here and I am grateful for the opportunities and experiences I have gained during my employment. \nI thank you for your support and understanding.\nSincerely, [Your Name]", "answer_id": "MSrdDafr77UvSHCnsPMSP3", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 73, "text": "The letter of recommendation should be structured in a formal manner. Begin by introducing the student and explaining your relationship to them. Describe their qualifications, including their academic performance and relevant skills, and mention any particular accomplishments or awards they have received. Explain why the student is a strong candidate for the program and how they will make a positive contribution. End the letter by reaffirming your recommendation and offering your contact information for further questions.", "answer_id": "hxkjUkDkXhGP78Vo74B4WE", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 74, "text": "Dear valued customers, \nWe are excited to announce the launch of our new software solution \u2013 a revolutionary product designed to transform the way you do business! Our new software is an intuitive and powerful tool that can help you streamline processes, save time and money, and increase overall productivity. \nAt [Company Name], we are committed to helping you stay ahead of the competition, and we believe that our new software solution is the perfect tool to help you achieve your goals. Our experienced team of professionals has worked hard to ensure that this product meets the highest standards of quality, and we are confident that it will make a real difference for your business. \nWe invite you to join us in this journey of transformation and discover how our new software can help you achieve your vision. Sign up now and get a free demo to learn more about this revolutionary product. \nThank you for your trust and support.", "answer_id": "hv9jpG9PxeJ9AnFdNzH3Jv", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 75, "text": "mer,\nWe apologize for the delay you experienced with your order and thank you for your patience. We have identified the cause of the issue and have taken steps to ensure it does not happen again. 
We value your business and hope to be able to serve you better in the future.", "answer_id": "X5BVHT5pFpWxB88SzERrEG", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 76, "text": "The video will explore the history and cultural significance of jazz music. We will begin by discussing its roots in West African cultural music traditions and its journey through the 19th and 20th centuries, focusing on the key musicians, composers, and genres that have defined the genre. We will also explore the impact of jazz on popular culture, such as in film and literature, as well as its influence on other musical styles. Finally, we will examine the legacy of jazz and its continued importance today.", "answer_id": "Af2SufKwvDBriNXnZX9UoP", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 77, "text": "I have composed a travel blog post about my recent trip to Hawaii. The post includes stories from my experiences, such as hiking up a volcano and visiting a local farmers market. I also included plenty of photos to give the reader a sense of place. I highlighted some of the unique cultural experiences, such as an evening spent listening to live Hawaiian music, as well as must-see attractions, like the beautiful beaches and lush rainforests. I ended the post with a personal reflection on the trip and why it was so special to me.", "answer_id": "HVnqVdSGRwQfqSsiYxtTTn", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 78, "text": "I recently watched the science fiction movie \"The Future is Blue\" and it was an incredible cinematic experience. The movie follows the story of a scientist who travels back in time to save the world from a deadly virus. The plot was interesting and engaging, with plenty of twists and turns to keep you guessing. The characters were also well developed and likable, making it easy to get invested in their story. The special effects were stunning and added a lot of visual oomph to the movie. All in all, I highly recommend this movie - it's an exciting and thought-provoking ride from start to finish.", "answer_id": "8hcmmjzyMyhFJ8ZwqJJfvw", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 79, "text": "The podcast episode would begin by introducing the topic of how streaming platforms have changed the music industry. This would include discussing the impact of streaming services like Spotify and Apple Music on music consumption, the effect on musicians' ability to make money, and how streaming has changed the way people find and discover music. The episode would then explore the effects of streaming on the music industry from multiple angles, such as the impact on musicians and songwriters, how streaming has changed the sound of popular music, and the implications for the music industry's future. 
Finally, the episode would conclude by discussing how streaming has both benefited and hurt the music industry, and what the future holds for music consumption.", "answer_id": "CU6gGfJqQmwZNxEG2n2Uda", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"question_id": 80, "text": "The symphony concert was a wonderful experience. The orchestra played beautifully, with a rich and soulful sound that filled the hall. The audience was captivated, and you could feel the energy of the music in the air. The performance was a perfect balance of classic and modern pieces, and the conductor's passion was contagious. The atmosphere was festive, and the encore performance was a perfect way to end the evening. All in all, it was a brilliant concert and a delight for the ears.", "answer_id": "5SWbCoLV6NRrq5RtWQK4jp", "model_id": "alpaca-13b:v1", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} diff --git a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_bard.jsonl b/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_bard.jsonl deleted file mode 100644 index d52334195..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_bard.jsonl +++ /dev/null @@ -1,80 +0,0 @@ -{"answer_id": "3oW4JY265ZPJGTYi2CgRYF", "model_id": "bard:20230327", "question_id": 1, "text": "Here are some tips on how to improve your time management skills:\n\n* **Set goals and prioritize your tasks.** What do you need to accomplish today, this week, and this month? Once you know what needs to be done, you can start to prioritize your tasks. Decide which tasks are most important and need to be done first.\n* **Break down large tasks into smaller, more manageable ones.** This will make them seem less daunting and help you to get started.\n* **Set deadlines for yourself and stick to them.** This will help you to stay on track and avoid procrastination.\n* **Take breaks when you need them.** Don't try to work for hours on end without taking a break. Getting up and moving around, or taking a few minutes to relax and clear your head, will help you to stay focused and productive.\n* **Don't be afraid to say no.** If you have too much on your plate, it's okay to say no to new commitments. It's better to focus on the tasks that you already have than to take on too much and end up feeling overwhelmed.\n* **Automate tasks whenever possible.** This will free up your time so that you can focus on more important things.\n* **Get organized.** This will help you to find what you need when you need it and avoid wasting time looking for things.\n* **Take care of yourself.** Make sure to get enough sleep, eat healthy foods, and exercise regularly. This will help you to stay focused and productive.\n\nTime management is a skill that takes time and practice to develop. But by following these tips, you can improve your time management skills and get more done in less time.", "metadata": {}} -{"answer_id": "CtDuDqypbL958TQork7v54", "model_id": "bard:20230327", "question_id": 2, "text": "There are many effective ways to deal with stress. Some of the most common include:\n\n* **Exercise.** Exercise releases endorphins, which have mood-boosting effects. Aim for at least 30 minutes of moderate-intensity exercise most days of the week.\n* **Eat a healthy diet.** A healthy diet can help improve your overall mood and well-being. 
Make sure to eat plenty of fruits, vegetables, and whole grains.\n* **Get enough sleep.** Sleep is essential for reducing stress and improving your mood. Aim for 7-8 hours of sleep each night.\n* **Talk to someone.** Talking to a friend, family member, therapist, or counselor can help you process your thoughts and feelings.\n* **Practice relaxation techniques.** Relaxation techniques such as yoga, meditation, and deep breathing can help you calm down and manage stress.\n* **Take a break.** If you're feeling overwhelmed, take a few minutes to step away from the situation and relax. Go for a walk, listen to music, or do something else that you enjoy.\n\nIt's important to find what works best for you and to make time for these activities on a regular basis. Stress is a normal part of life, but it's important to manage it in a healthy way.", "metadata": {}} -{"answer_id": "jqb5c6eBeyVTFo7SsqxqHA", "model_id": "bard:20230327", "question_id": 3, "text": "Here are some of the main differences between Python and JavaScript programming languages:\n\n* **Python is a general-purpose, high-level programming language, while JavaScript is a text-based, object-oriented programming language.** Python is designed to be easy to read and learn, while JavaScript is more complex.\n* **Python is dynamically typed, while JavaScript is statically typed.** This means that the type of a variable in Python can change at runtime, while the type of a variable in JavaScript is fixed at compile time.\n* **Python has a large standard library, while JavaScript does not.** The standard library contains a wide range of modules that can be used for common tasks, such as file I/O, networking, and data manipulation.\n* **Python is an interpreted language, while JavaScript is a compiled language.** This means that Python code is executed line-by-line by an interpreter, while JavaScript code is converted into machine code before it is executed.\n* **Python is used for a wide range of applications, including web development, data science, and artificial intelligence.** JavaScript is primarily used for web development, but it can also be used for other applications, such as desktop development and mobile development.\n\nUltimately, the best language for you to learn depends on your specific needs and goals. If you are new to programming, Python is a good choice because it is easy to learn and use. If you want to develop web applications, JavaScript is a good choice because it is the most popular language for front-end development.", "metadata": {}} -{"answer_id": "P5rC8W6gTuD4aY6HR5HQj9", "model_id": "bard:20230327", "question_id": 4, "text": "Here are some tips on how to increase your productivity while working from home:\n\n* **Set up a dedicated workspace.** This will help you stay focused and avoid distractions. If you can, set up your workspace in a quiet area of your home.\n* **Start your day early.** This will give you time to get organized and tackle your most important tasks.\n* **Take breaks.** It's important to take breaks throughout the day to stretch, walk around, or get a snack. This will help you stay refreshed and focused.\n* **Avoid distractions.** This means turning off your phone, closing your email, and anything else that might take your attention away from your work.\n* **Take care of yourself.** Make sure to eat healthy, get enough sleep, and exercise regularly. 
This will help you stay healthy and productive.\n* **Reward yourself.** When you accomplish a task, give yourself a small reward, such as taking a break or watching a TV show. This will help you stay motivated.\n* **Communicate with your team.** Make sure to stay in touch with your team members and let them know what you're working on. This will help you stay on track and avoid duplication of effort.\n* **Set goals.** Having goals will help you stay focused and motivated. Make sure your goals are specific, measurable, achievable, relevant, and time-bound.\n* **Track your progress.** It's important to track your progress so you can see how you're doing. This will help you stay motivated and make adjustments as needed.\n* **Be patient.** Working from home can be a challenge, but it's important to be patient with yourself. It takes time to adjust to a new way of working.\n\nFollowing these tips can help you increase your productivity while working from home.", "metadata": {}} -{"answer_id": "3uaqwfbwxtyDdqB8UVN3jM", "model_id": "bard:20230327", "question_id": 5, "text": "Quantum computing is a field of computer science that deals with the development of quantum algorithms and quantum computers. Quantum algorithms are algorithms that can be executed by quantum computers, and quantum computers are computers that can execute quantum algorithms.\n\nQuantum computing is based on the principles of quantum mechanics, which is the study of the behavior of matter and energy at the atomic and subatomic level. Quantum mechanics is a very different way of thinking about the world than classical mechanics, which is the study of the behavior of matter and energy at the macroscopic level.\n\nOne of the key concepts in quantum mechanics is superposition. Superposition means that a quantum system can exist in multiple states at the same time. For example, an electron can be in a state of superposition where it is both a wave and a particle.\n\nAnother key concept in quantum mechanics is entanglement. Entanglement means that two quantum systems can be linked together in such a way that they share the same fate, even if they are separated by a large distance.\n\nQuantum computing is still in its early stages of development, but it has the potential to revolutionize many fields, including cryptography, chemistry, and materials science.\n\nHere are some of the basics of quantum computing:\n\n* Quantum bits, or qubits, are the basic unit of information in quantum computing. Qubits can be in a superposition of states, meaning they can be both 0 and 1 at the same time.\n* Quantum gates are the basic operations that can be performed on qubits. Quantum gates can be used to create superpositions, entangle qubits, and measure qubits.\n* Quantum algorithms are algorithms that can be executed by quantum computers. Quantum algorithms can be used to solve problems that are difficult or impossible to solve with classical computers.\n* Quantum computers are computers that can execute quantum algorithms. Quantum computers are still in their early stages of development, but they have the potential to revolutionize many fields, including cryptography, chemistry, and materials science.", "metadata": {}} -{"answer_id": "HTRJ5q97DrgzoSofxkgedz", "model_id": "bard:20230327", "question_id": 6, "text": "The main difference between plant-based and animal-based protein sources is that plant-based protein sources do not contain all nine essential amino acids, while most meat proteins do. 
(Most plant-based protein sources tend to lack at least 1 of the essential amino acids, although there are some that are considered complete.)\n\nAnimal proteins are considered complete proteins, meaning they contain all nine essential amino acids that the body cannot produce on its own. Plant proteins, on the other hand, are often incomplete proteins, meaning they do not contain all nine essential amino acids. However, there are some plant-based protein sources that are considered complete, such as quinoa and buckwheat.\n\nAnimal proteins also tend to be higher in saturated fat and cholesterol than plant proteins. Plant proteins are lower in saturated fat and cholesterol, and they also contain fiber, which can help to lower cholesterol levels.\n\nThere are several benefits to eating plant-based protein sources. Plant-based protein sources are often lower in calories and fat than animal protein sources. They are also a good source of fiber, which can help to keep you feeling full and satisfied. Plant-based protein sources can also help to lower cholesterol levels and reduce the risk of heart disease.\n\nThere are also some benefits to eating animal-based protein sources. Animal protein sources are a good source of B vitamins, iron, and zinc. They can also help to build muscle mass and strength. However, it is important to choose lean cuts of meat and poultry to reduce saturated fat and cholesterol intake.\n\nUltimately, the best type of protein for you depends on your individual needs and preferences. If you are looking for a diet that is low in calories and fat, plant-based protein sources are a good option. If you are looking for a diet that is high in protein and B vitamins, animal protein sources are a good option.", "metadata": {}} -{"answer_id": "EhhyKNc3m8c9tnxm8uPfmt", "model_id": "bard:20230327", "question_id": 7, "text": "Here are some tips on how to develop your critical thinking skills:\n\n* **Be open to new ideas and information.** Don't be afraid to question everything, even your own beliefs.\n* **Be curious and ask questions.** The more you learn, the better equipped you'll be to think critically about the information you're presented with.\n* **Be skeptical of information that seems too good to be true.** If something seems too good to be true, it probably is.\n* **Be aware of your own biases.** We all have biases, but it's important to be aware of them so that they don't cloud your judgment.\n* **Be able to identify and evaluate arguments.** When you're presented with an argument, be able to identify the premises and the conclusion, and evaluate whether the premises support the conclusion.\n* **Be able to think creatively and solve problems.** Critical thinking is not just about analyzing information, but also about being able to come up with new ideas and solutions to problems.\n* **Practice regularly.** The more you practice critical thinking, the better you'll become at it.\n\nThere are many ways to develop your critical thinking skills. You can read books and articles on the subject, take classes, or even just spend time thinking about the world around you. 
The important thing is to be willing to learn and grow.", "metadata": {}} -{"answer_id": "JQXPknRQd24NUPKZxi6RNf", "model_id": "bard:20230327", "question_id": 8, "text": "The education sector faces many challenges today, including:\n\n* **Funding:** Schools are often underfunded, which can lead to a lack of resources and support for students and teachers.\n* **Class size:** Classrooms are often overcrowded, which can make it difficult for teachers to give individual students the attention they need.\n* **Teacher shortages:** There is a shortage of qualified teachers in many areas, which can lead to schools hiring teachers who are not adequately prepared.\n* **Student achievement:** The achievement gap between students from different socioeconomic backgrounds persists, which means that some students are not getting the education they need to succeed.\n* **School safety:** School shootings and other acts of violence have become more common, which has made schools a less safe place for students and staff.\n* **Technology:** Technology is changing the way we learn, but it also presents new challenges for schools and educators.\n* **Charter schools and voucher programs:** Charter schools and voucher programs have become more popular in recent years, which has led to competition between public and private schools.\n\nThese are just some of the challenges that the education sector faces today. It is important to address these challenges in order to ensure that all students have access to a quality education.", "metadata": {}} -{"answer_id": "Lb3C2xQKdLCqFj4v3rmaof", "model_id": "bard:20230327", "question_id": 9, "text": "There are many factors that influence consumer behavior, but some of the primary ones include:\n\n* **Personal factors:** These factors include a person's age, gender, income, occupation, and lifestyle. For example, a young person with a high income may be more likely to spend money on luxury items, while an older person with a lower income may be more likely to spend money on necessities.\n* **Psychological factors:** These factors include a person's motivation, perception, learning, attitudes, and beliefs. For example, a person who is motivated by a desire for self-improvement may be more likely to buy a product that they believe will help them achieve their goals.\n* **Social factors:** These factors include a person's family, friends, and social class. For example, a person who is influenced by their family may be more likely to buy a product that they believe their family will approve of.\n* **Cultural factors:** These factors include a person's culture, subculture, and social class. For example, a person who is from a culture that values luxury may be more likely to buy a product that is considered to be a luxury item.\n* **Economic factors:** These factors include a person's income, savings, and debt. For example, a person who has a high income may be more likely to buy a product that is expensive.\n\nIt is important to note that these factors are not always the same for everyone. A person's individual circumstances and preferences will also play a role in their consumer behavior.", "metadata": {}} -{"answer_id": "DhuZJtL3jhnrsTBvDct9oV", "model_id": "bard:20230327", "question_id": 10, "text": "There are many effective strategies for conflict resolution in the workplace. Some of the most common include:\n\n* **Communication:** The first step in resolving conflict is to communicate effectively with all parties involved. 
This means listening to each other's perspectives, sharing your own feelings and needs, and being open to compromise.\n* **Active listening:** Active listening is a key skill in conflict resolution. It involves paying attention to what the other person is saying, both verbally and nonverbally, and trying to understand their point of view.\n* **Problem-solving:** Once you have a better understanding of the conflict, you can start to work on solutions. This may involve brainstorming ideas, negotiating compromises, or finding ways to meet everyone's needs.\n* **Time-out:** If things start to get heated, it may be helpful to take a time-out. This gives everyone a chance to cool down and collect their thoughts.\n* **Mediation:** If you are unable to resolve the conflict on your own, you may want to consider mediation. This is a process in which a neutral third party helps you and the other person to come to a resolution.\n* **Arbitration:** If mediation is unsuccessful, you may want to consider arbitration. This is a process in which a neutral third party makes a decision about how to resolve the conflict.\n\nIt is important to remember that there is no one-size-fits-all approach to conflict resolution. The best strategy will vary depending on the specific situation. However, by following these general tips, you can increase the chances of resolving conflict peacefully and effectively.", "metadata": {}} -{"answer_id": "mDSj4BKim2eANUnEjW7xBm", "model_id": "bard:20230327", "question_id": 11, "text": "The use of single-use plastic bottles has a number of negative environmental and health implications.\n\nEnvironmentally, single-use plastic bottles contribute to the problem of plastic pollution. Every year, millions of plastic bottles end up in landfills, incinerators, or the ocean. These bottles can take hundreds of years to decompose, and they can leach harmful chemicals into the environment.\n\nHealth-wise, the use of single-use plastic bottles can increase the risk of exposure to harmful chemicals. These chemicals can leach into the water from the bottle, or they can be released into the air when the bottle is heated. Exposure to these chemicals can cause a number of health problems, including cancer, reproductive problems, and neurological problems.\n\nUsing a reusable bottle instead of a single-use plastic bottle can help to reduce the environmental and health impacts of plastic pollution. Reusable bottles are made from materials that are less harmful to the environment, and they can be used over and over again. This helps to reduce the amount of plastic waste that ends up in landfills, incinerators, or the ocean.\n\nReusable bottles are also better for human health. They do not leach harmful chemicals into the water, and they do not release chemicals into the air when they are heated. This helps to reduce the risk of exposure to harmful chemicals.\n\nOverall, using a reusable bottle instead of a single-use plastic bottle is a better choice for both the environment and human health.", "metadata": {}} -{"answer_id": "MnkceSK7WwyXqAhbuKVYX7", "model_id": "bard:20230327", "question_id": 12, "text": "There are many factors to consider when designing an inclusive and accessible public transportation system. Some of the most important factors include:\n\n* **Accessibility**. The system must be accessible to people with disabilities, including those who use wheelchairs, mobility scooters, or other assistive devices. 
This means that all aspects of the system, including the vehicles, the stations, and the signage, must be designed to be accessible.\n* **Affordability**. The system must be affordable for all users, including low-income and marginalized communities. This means that the fares must be reasonable and that there must be a variety of payment options available.\n* **Convenience**. The system must be convenient to use, with frequent service and easy connections between different modes of transportation. This means that the system must be well-integrated with other transportation options, such as car sharing and bike lanes.\n* **Safety**. The system must be safe for all users, including women and children. This means that the vehicles must be well-maintained and that there must be adequate security measures in place.\n* **Sustainability**. The system must be sustainable, both environmentally and economically. This means that the system must use renewable energy sources and that it must be affordable to operate.\n\nThese are just some of the factors that must be considered when designing an inclusive and accessible public transportation system. By taking these factors into account, we can create a system that is accessible to everyone and that meets the needs of all communities.", "metadata": {}} -{"answer_id": "EsyaBVpTN8BGbTSiFMnZUF", "model_id": "bard:20230327", "question_id": 13, "text": "Fiscal and monetary policies can be used by governments to combat economic recessions in a number of ways.\n\n* **Fiscal policy** refers to the use of government spending and taxation to influence the economy. During a recession, governments can use fiscal policy to increase demand and stimulate the economy. This can be done by increasing government spending on infrastructure, education, and healthcare. Governments can also reduce taxes to give consumers more money to spend.\n* **Monetary policy** refers to the use of interest rates and the money supply to influence the economy. During a recession, central banks can use monetary policy to lower interest rates and make it cheaper for businesses to borrow money. This can encourage businesses to invest and expand, which can create jobs and boost the economy.\n\nBoth fiscal and monetary policies can be effective in combatting economic recessions. However, the effectiveness of these policies depends on a number of factors, including the severity of the recession, the underlying causes of the recession, and the effectiveness of the policies themselves.\n\nIn recent years, there has been a debate about the best way to use fiscal and monetary policies to combat economic recessions. Some economists argue that fiscal policy should be the primary tool used to combat recessions, while others argue that monetary policy should be the primary tool. There is no single answer to this question, as the best policy approach will vary depending on the specific circumstances.\n\nHowever, both fiscal and monetary policies can be effective in combatting economic recessions, and governments should use both tools to help stimulate the economy and create jobs.", "metadata": {}} -{"answer_id": "dX8M752A6tzqLg9KhwgG5p", "model_id": "bard:20230327", "question_id": 14, "text": "Language and cultural barriers can affect the way people communicate and form relationships in multicultural societies in a number of ways.\n\n* **Language barriers** can make it difficult for people to understand each other's words and meanings. 
This can lead to misunderstandings and frustration, and can make it difficult to build trust and rapport.\n* **Cultural barriers** can make it difficult for people to understand each other's values, beliefs, and customs. This can lead to misunderstandings and conflict, and can make it difficult to build strong relationships.\n* **Language and cultural barriers** can also make it difficult for people to feel comfortable and included in multicultural societies. This can lead to feelings of isolation and alienation, and can make it difficult to participate fully in the community.\n\nThere are a number of things that can be done to overcome language and cultural barriers in multicultural societies. These include:\n\n* **Learning about other cultures**. This can help people to understand and appreciate different cultures, and can make it easier to build relationships with people from other cultures.\n* **Learning about other languages**. This can help people to communicate effectively with people from other cultures, and can help to build trust and rapport.\n* **Building bridges**. This can help to bring people from different cultures together, and can help to create a more inclusive and supportive environment.\n* **Celebrating diversity**. This can help to promote understanding and appreciation for different cultures, and can help to create a more vibrant and exciting community.", "metadata": {}} -{"answer_id": "dzwhq5XbaEBVpargyczz9B", "model_id": "bard:20230327", "question_id": 15, "text": "Artificial intelligence (AI) can be used to improve the quality and efficiency of healthcare delivery in a number of ways. For example, AI can be used to:\n\n* **Diagnose diseases:** AI-powered algorithms can be trained to analyze large amounts of medical data, such as patient records and medical images, to identify patterns that may indicate disease. This can help doctors to diagnose diseases more accurately and quickly, which can lead to better patient outcomes.\n* **Treat diseases:** AI can also be used to develop new treatments for diseases. For example, AI-powered algorithms can be used to screen large libraries of compounds for potential drug candidates. This can help to speed up the drug discovery process and bring new drugs to market more quickly.\n* **Monitor patients:** AI can be used to monitor patients for signs of disease progression or complications. For example, AI-powered algorithms can be used to analyze data from wearable devices, such as smartwatches and fitness trackers, to identify changes in heart rate, sleep patterns, and other vital signs. This can help doctors to identify problems early on and intervene before they become serious.\n* **Provide administrative support:** AI can also be used to provide administrative support to healthcare providers. For example, AI-powered chatbots can be used to answer patient questions, schedule appointments, and process insurance claims. This can free up healthcare providers to spend more time with patients and focus on providing care.\n\nOverall, AI has the potential to revolutionize healthcare delivery by improving the quality and efficiency of care. By automating tasks and providing support to healthcare providers, AI can help to free up time and resources, which can lead to better patient outcomes.", "metadata": {}} -{"answer_id": "8zqxUtHxgtoHBkbf2bkqNW", "model_id": "bard:20230327", "question_id": 16, "text": "CRISPR-Cas9 is a gene editing technology that can be used to make precise changes to DNA. 
It works by cutting DNA at a specific location and then allowing the cell's natural DNA repair mechanisms to take over. This can be used to delete, add, or replace genes.\n\nCRISPR-Cas9 has a wide range of potential applications, including:\n\n* Treating genetic diseases: CRISPR-Cas9 could be used to correct genetic mutations that cause diseases such as cystic fibrosis, sickle cell anemia, and Huntington's disease.\n* Improving crop yields: CRISPR-Cas9 could be used to make crops more resistant to pests and diseases, or to increase the nutritional content of crops.\n* Creating new biofuels: CRISPR-Cas9 could be used to engineer bacteria that can produce biofuels from waste materials.\n\nHowever, CRISPR-Cas9 also raises ethical concerns, such as:\n\n* The potential to create designer babies: CRISPR-Cas9 could be used to edit the genes of human embryos, potentially creating \"designer babies\" with desired traits.\n* The potential to create genetically modified organisms (GMOs): CRISPR-Cas9 could be used to edit the genes of plants and animals, creating GMOs that some people believe could pose risks to human health or the environment.\n* The potential to create new weapons: CRISPR-Cas9 could be used to create new biological weapons that could be used to harm or kill people.\n\nIt is important to weigh the potential benefits and risks of CRISPR-Cas9 before using it.", "metadata": {}} -{"answer_id": "WJc37t4n5PqmKKS3V4eMG2", "model_id": "bard:20230327", "question_id": 17, "text": "Vaccines work by teaching your body's immune system how to recognize and fight off a disease. When you get a vaccine, your body produces antibodies against the disease. These antibodies will stay in your blood and help protect you if you are ever exposed to the disease again.\n\nVaccinations are very effective at preventing diseases. For example, the measles vaccine is 97% effective at preventing measles, and the mumps vaccine is 88% effective at preventing mumps.\n\nVaccinations also help protect communities from infectious diseases. When a large number of people in a community are vaccinated, it makes it harder for the disease to spread. This is because even if a few people in the community are infected, the disease will not be able to spread to as many people because most people are immune. This is called herd immunity.\n\nHerd immunity is important because it can help protect people who cannot be vaccinated, such as young children, pregnant women, and people with certain medical conditions. These people are at increased risk of getting sick if they are exposed to a disease.\n\nHerd immunity can also help protect people who are vaccinated but whose immune system does not respond well to the vaccine. These people may still get sick if they are exposed to the disease, but they are less likely to get seriously ill.\n\nVaccination is one of the most important ways to protect yourself and your community from infectious diseases. If you are not sure whether you are up-to-date on your vaccinations, talk to your doctor.", "metadata": {}} -{"answer_id": "CvVLf8FgoHywJy8j8JJ4qL", "model_id": "bard:20230327", "question_id": 18, "text": "Social media platforms have a significant influence on the way people consume and share news. They provide a platform for people to share news stories with their friends and followers, and they also allow users to curate their own news feeds. 
This can lead to people being exposed to a wider range of news stories than they would otherwise be, but it can also make it more difficult for them to distinguish between legitimate news sources and fake news.\n\nSocial media platforms also use algorithms to determine what news stories are shown to users. These algorithms are designed to keep users engaged on the platform, and they often prioritize news stories that are likely to get a lot of attention, such as those that are sensational or controversial. This can lead to people being exposed to a lot of news stories that are not necessarily accurate or reliable.\n\nThe spread of misinformation on social media is a serious problem. Fake news stories can be designed to look like they are from legitimate news sources, and they can spread quickly and easily on social media. This can have a significant impact on public opinion, and it can also make it more difficult for people to trust the information they see online.\n\nThere are a number of things that can be done to combat the spread of misinformation on social media. One is to educate people about how to identify fake news stories. Another is to make it easier for people to report fake news stories. And finally, social media platforms can do more to remove fake news stories from their platforms.\n\nIt is important to remember that social media is not the only way that people get news. Many people still get their news from traditional sources, such as newspapers and television. However, social media is becoming an increasingly important part of how people consume news, and it is important to understand how it is changing the way people consume and share news.", "metadata": {}} -{"answer_id": "P5rytR6vTJjxgWxRoxT3vX", "model_id": "bard:20230327", "question_id": 19, "text": "Cultural, social, and economic factors influence people's food choices in many ways. Here are a few examples:\n\n* **Culture** plays a role in food choices by shaping what people consider to be acceptable or desirable to eat. For example, some cultures have strong taboos against eating certain foods, such as pork or beef.\n* **Social factors** such as family, friends, and peers can also influence food choices. People are more likely to eat foods that are popular among their social group.\n* **Economic factors** such as the cost of food and the availability of healthy food options can also affect food choices. People who are on a tight budget may choose to eat less expensive, less healthy foods.\n\nThis knowledge can be used to promote healthier diets by making healthy foods more accessible and affordable, and by educating people about the importance of eating a healthy diet. For example, governments can provide subsidies for healthy foods, or they can build more grocery stores in underserved communities. Schools can also teach students about nutrition and healthy eating habits.\n\nIn addition, individuals can make healthier food choices by being more mindful of the cultural, social, and economic factors that influence their decisions. For example, people can try to eat more foods that are culturally appropriate, and they can avoid foods that are not healthy simply because they are popular. 
People can also try to eat more healthy foods that are affordable, and they can make healthy food choices even when they are on a tight budget.\n\nBy making healthy food choices, people can improve their overall health and well-being.", "metadata": {}} -{"answer_id": "5biCd7QRZP6rquaz8eC9Vm", "model_id": "bard:20230327", "question_id": 20, "text": "Natural selection is the process by which organisms with favorable traits survive and reproduce, passing on those traits to their offspring. This process leads to the evolution of species over time.\n\nNatural selection is based on the following principles:\n\n* Variation: There is variation within a population of organisms. This variation can be caused by genetic mutation, environmental factors, or both.\n* Differential reproduction: Not all organisms survive and reproduce equally. Some organisms are more likely to survive and reproduce than others.\n* Heredity: The traits that an organism inherits from its parents are passed down to its offspring.\n\nNatural selection can lead to the evolution of new species. For example, if a population of giraffes is living in an environment with a lot of tall trees, the giraffes with longer necks will be more likely to survive and reproduce. This is because they will be able to reach the leaves on the taller trees. Over time, the giraffes with longer necks will become more common in the population. This is an example of natural selection leading to the evolution of a new species, in this case, giraffes with longer necks.\n\nNatural selection can also lead to the adaptation of species to their environment. For example, if a population of fish is living in an environment with a lot of predators, the fish with darker colors will be more likely to survive and reproduce. This is because they will be less visible to the predators. Over time, the fish with darker colors will become more common in the population. This is an example of natural selection leading to the adaptation of a species to its environment.\n\nNatural selection is a powerful mechanism that can lead to the evolution and adaptation of species. It is a key part of the process of life on Earth.", "metadata": {}} -{"answer_id": "363RwB6kr8nV6qFNdjXZnS", "model_id": "bard:20230327", "question_id": 21, "text": "I would introduce myself as follows:\n\n\"I am Sir [your name], a knight in the service of [your lord's name]. I have come to this banquet to pay my respects to your majesty and to enjoy your hospitality.\"\n\nI would then bow to the king or queen and take my seat at the table. If I was asked to speak, I would give a brief account of my deeds and accomplishments.\n\nI would be careful to avoid anything that could be seen as boastful or arrogant. I would also be mindful of the customs and etiquette of the time, and make sure to behave in a way that would be considered appropriate.", "metadata": {}} -{"answer_id": "gDnYxMu5Dd52xhMqQAJaZP", "model_id": "bard:20230327", "question_id": 22, "text": "Here is what I would say to my crew to motivate them to search for hidden treasure:\n\n\"Mates, I know you're all tired and hungry, but we have a chance to find something that will make us all rich beyond our wildest dreams. There's a treasure buried out there, and I know we can find it. We just need to keep our heads up and keep searching. I believe in you, and I know you can do this. So let's get to it!\"\n\nI would also try to appeal to their sense of adventure and excitement. 
I would tell them about the stories I've heard of other pirates who have found great treasure, and how they have used it to live a life of luxury and adventure. I would tell them that this could be their chance to live that life too.\n\nFinally, I would try to motivate them by offering them a share of the treasure. I would tell them that if we find the treasure, they will all be rich beyond their wildest dreams. I would tell them that they will be able to buy anything they want, and that they will never have to worry about money again.\n\nI believe that if I can motivate my crew with these words, they will be more likely to search for the hidden treasure.", "metadata": {}} -{"answer_id": "kCV5RSrnmmTyv3HA5oU38P", "model_id": "bard:20230327", "question_id": 23, "text": "If I were a Shakespearean character, I would declare my love for someone in a soliloquy by speaking from the heart and expressing my true feelings. I would use poetic language and imagery to describe my love, and I would convey the depth of my emotion. I would also be sure to include a declaration of my love, so that my beloved would know how I feel.\n\nHere is an example of how I might declare my love in a soliloquy:\n\nMy dearest, I love you with all my heart.\nYou are the sun that lights my days,\nAnd the moon that guides me through the night.\nYou are the stars that sparkle in my eyes,\nAnd the rain that washes away my tears.\nYou are the wind that fills my sails,\nAnd the earth that grounds me.\nYou are my everything,\nAnd I will love you forever.\n\nI hope this helps!", "metadata": {}} -{"answer_id": "CTGLYNAWrWeuCYZAR9bD2r", "model_id": "bard:20230327", "question_id": 24, "text": "I would tell the child that I was once a regular person, just like them. I went to school, played games, and had friends. But one day, I got into an accident. I was hit by a car and almost died. But instead of dying, I got superpowers! I can fly, I can lift heavy things, and I can even shoot lasers out of my eyes! I use my powers to help people and make the world a better place.\n\nI would also tell the child that I was inspired by other superheroes, like Superman and Batman. They showed me that even though bad things happen in the world, there are always people who are willing to help others. I wanted to be one of those people, so I decided to use my powers to make a difference.\n\nI would hope that the child would be inspired by my story and would want to use their own powers to help others. I would tell them that even though they don't have superpowers, they can still make a difference in the world by being kind to others, helping out when they can, and standing up for what they believe in.", "metadata": {}} -{"answer_id": "Eh5qCENdftYf9JKrBspYna", "model_id": "bard:20230327", "question_id": 25, "text": "As a time traveler from the year 3000, I would tell people about the following technological advancements:\n\n* **Dyson spheres:** Dyson spheres are theoretical megastructures that would surround a star and harness its energy. They are still in the theoretical stage, but scientists believe that they could be built in the next few centuries.\n* **Interstellar travel:** Interstellar travel is the ability to travel between stars and planets. 
It is currently impossible with our current technology, but scientists are working on developing new technologies that could make it possible in the future.\n* **Artificial intelligence:** Artificial intelligence (AI) is a branch of computer science that deals with the creation of intelligent agents, which are systems that can reason, learn, and act autonomously. AI has made great progress in recent years, and it is likely to continue to advance in the future.\n* **Robotics:** Robotics is the field of engineering that deals with the design, construction, operation, and application of robots. Robots are machines that can be programmed to perform a variety of tasks. They are already used in a variety of industries, and they are likely to become even more common in the future.\n* **Gene editing:** Gene editing is the ability to modify the genes of an organism. This technology has the potential to cure genetic diseases, improve crop yields, and even create new species of plants and animals.\n* **Virtual reality:** Virtual reality (VR) is a computer-generated simulation that can be similar to or completely different from the real world. VR is already being used for a variety of purposes, such as training, entertainment, and education. It is likely to become even more common in the future.\n\nThese are just a few of the technological advancements that I would tell people about. I am sure that there will be many other advancements that I cannot even imagine.", "metadata": {}} -{"answer_id": "M6hCe8Z4JDpt75HZ5PsSWr", "model_id": "bard:20230327", "question_id": 26, "text": "Here is a possible description of the winning play in the final seconds of a championship game:\n\n\"With only seconds remaining in the game, the score is tied. The ball is on the opposing team's 40-yard line, and they have one last chance to score. The quarterback takes the snap and drops back to pass. He looks downfield and sees his wide receiver open. He throws the ball, and the wide receiver catches it in stride. He runs into the end zone for a touchdown! The crowd goes wild as the team celebrates their victory.\"\n\nThis is just one example of how a winning play in the final seconds of a championship game could be described. There are many other ways to describe the play, and the specific details of the play would depend on the specific game.", "metadata": {}} -{"answer_id": "QCDsmbALfut5758pyiKvc5", "model_id": "bard:20230327", "question_id": 27, "text": "My signature dish is a pan-seared filet mignon wrapped in prosciutto and topped with a mushroom duxelles. It is served with a side of roasted potatoes and asparagus.\n\nThe filet mignon is cooked to perfection, and the prosciutto adds a salty, smoky flavor. The mushroom duxelles is a rich and flavorful sauce that pairs perfectly with the beef. The roasted potatoes and asparagus are a delicious and healthy accompaniment.\n\nThis dish is a true labor of love. I take great care in selecting the ingredients and preparing the dish. I want my guests to experience the best of what I have to offer.\n\nI am confident that you will enjoy this dish as much as I do. It is a perfect combination of flavors and textures that will leave you feeling satisfied.\n\nI hope you will consider my dish for your next competition. 
I believe it is a dish that would stand out from the competition and impress your judges.", "metadata": {}} -{"answer_id": "NWUbhwZQCuXsuQimrjQRza", "model_id": "bard:20230327", "question_id": 28, "text": "I stand at the top of the world, my breath coming in ragged gasps. The air is thin and cold, but I don't care. I'm on top of Mount Everest, the highest point on Earth. I've made it.\n\nI look around at the vast expanse of snow and ice. The sun is just starting to rise, and the sky is a brilliant blue. The mountains around me are like giant sleeping giants.\n\nI feel a sense of awe and wonder. I've never seen anything like this before. It's so beautiful, so peaceful.\n\nI take a deep breath and savor the moment. I've worked so hard for this, and it's all worth it.\n\nI know that I'll never forget this feeling. It's the feeling of accomplishment, of reaching a goal, of overcoming adversity. It's the feeling of being alive.\n\nI turn and start my descent. I know that I'll never be able to stay at the top of the world forever. But I'll always remember this moment.", "metadata": {}} -{"answer_id": "VYwSjZrSLW9ZSvqryyjEaB", "model_id": "bard:20230327", "question_id": 29, "text": "As a space colonist on Mars, my daily life is a constant struggle for survival. The challenges I face are many and varied, but the most pressing are the lack of oxygen, the extreme cold, and the radiation.\n\nI wake up each morning in a pressurized habitat, where I eat a quick breakfast and then get ready for the day's work. My first task is to check the air filters, as they are constantly being clogged by the dust and sand. Once that is done, I start my work on the crops. We have been able to grow some plants in the greenhouse, but they are not enough to sustain us. We are constantly on the lookout for new ways to grow food, as we know that our survival depends on it.\n\nAfter a long day of work, I return to the habitat for dinner and a few hours of relaxation. I am always tired, as the work is physically and mentally demanding. But I know that I have to keep going, as I am part of something bigger than myself. I am part of the first generation of humans to live on another planet, and that is an honor.\n\nThe challenges I face are many, but I am determined to overcome them. I know that the future of humanity depends on it.\n\nHere are some of the challenges I face as a space colonist on Mars:\n\n* **Lack of oxygen.** The atmosphere on Mars is very thin, and it contains mostly carbon dioxide. This means that I have to wear a pressurized suit whenever I go outside, and I have to bring my own oxygen with me.\n* **Extreme cold.** The average temperature on Mars is -63 degrees Celsius. This means that I have to wear multiple layers of clothing, and I have to keep my habitat heated.\n* **Radiation.** The surface of Mars is constantly bombarded by radiation from the sun and from space. This radiation can cause cancer, so I have to wear special shielding to protect myself.\n* **Dust storms.** The surface of Mars is very dusty, and there are frequent dust storms. These dust storms can block out the sun and make it difficult to breathe.\n* **Isolation.** Mars is very far away from Earth. This means that I am cut off from my family and friends, and I have to rely on my fellow colonists for support.\n\nDespite these challenges, I am determined to make a life for myself on Mars. 
I know that it will not be easy, but I am confident that I can overcome the challenges I face.", "metadata": {}} -{"answer_id": "FA7PXuUbEVGKHaWpxaimy8", "model_id": "bard:20230327", "question_id": 30, "text": "I am a survivor in a post-apocalyptic world. I have been living on my own for the past few years, and I have learned to be resourceful and self-sufficient. I have a small garden where I grow my own food, and I have a few chickens that I raise for eggs. I also scavenge for food and supplies when I can.\n\nI have made a few allies in this world. There are a few other survivors who live in the area, and we help each other out when we can. We share food and supplies, and we look out for each other's safety. We are all trying to survive in this new world, and we know that we are stronger together.\n\nOne of my allies is a young woman named Sarah. She is a skilled hunter and gatherer, and she has taught me a lot about living off the land. She is also a kind and compassionate person, and she has always been there for me when I needed her.\n\nAnother one of my allies is a man named John. He is a former soldier, and he is a skilled fighter and tactician. He has taught me a lot about self-defense, and he has also helped me to develop my leadership skills. He is a loyal and trustworthy friend, and I am grateful to have him in my life.\n\nI am grateful for my allies, and I know that I am lucky to have them in my life. We are all trying to survive in this new world, and we know that we are stronger together.", "metadata": {}} -{"answer_id": "j5EV5cZNsn9DcF6WsvXRzS", "model_id": "bard:20230327", "question_id": 31, "text": "Here are some ways to determine if a restaurant is popular among locals or mainly attracts tourists:\n\n* **Check the restaurant's website or social media pages.** If the restaurant has a lot of reviews and comments from locals, that's a good sign that it's popular among locals.\n* **Look for the restaurant's name in local publications or websites.** If the restaurant is mentioned in local newspapers or magazines, that's another sign that it's popular among locals.\n* **Ask locals for recommendations.** If you're looking for a good place to eat, ask locals for their recommendations. They're likely to know the best places to eat in their area.\n* **Look for the restaurant's location.** If the restaurant is located in a tourist area, that's a good sign that it mainly attracts tourists. However, if the restaurant is located in a residential area, that's a good sign that it's popular among locals.\n* **Look for the restaurant's atmosphere.** If the restaurant has a casual atmosphere, that's a good sign that it's popular among locals. However, if the restaurant has a formal atmosphere, that's a good sign that it mainly attracts tourists.\n\nKnowing whether a restaurant is popular among locals or mainly attracts tourists can be useful for a number of reasons. For example, if you're looking for a good place to eat that's authentic to the local culture, you'll want to find a restaurant that's popular among locals. On the other hand, if you're looking for a good place to eat that's convenient to your hotel, you'll want to find a restaurant that's popular among tourists.", "metadata": {}} -{"answer_id": "2eAYCYmwTkPa3ejQDv8LyB", "model_id": "bard:20230327", "question_id": 32, "text": "There are a few subtle clues that suggest someone is pretending to understand a topic or conversation when they are actually confused or uninformed. 
Here are a few examples:\n\n* Using vague or general terms without providing any specific examples.\n* Asking questions that are already answered in the conversation.\n* Making statements that don't seem to be relevant to the topic.\n* Not making eye contact or appearing uncomfortable.\n* Fidgeting or making other nervous gestures.\n* Trying to change the subject or end the conversation quickly.\n\nIf you notice someone exhibiting these behaviors, it's possible that they are pretending to understand something they don't. If you're not sure, it's always best to err on the side of caution and ask them to clarify what they mean.", "metadata": {}} -{"answer_id": "d562WYnhsvgJ8J6Ubitmvw", "model_id": "bard:20230327", "question_id": 33, "text": "There are a few reasons why someone might choose to use a paper map or ask for directions instead of relying on a GPS device or smartphone app:\n\n* **They may not have a smartphone or GPS device.** Not everyone has a smartphone or GPS device, especially if they are traveling to a developing country or rural area.\n* **They may not have a reliable internet connection.** Even if you have a smartphone or GPS device, you may not have a reliable internet connection to use them. This can be a problem in areas with poor cell phone reception or where internet access is expensive.\n* **They may not want to use their battery life.** Using a smartphone or GPS device can drain your battery life quickly, especially if you are using them for navigation. If you are trying to conserve battery life, you may prefer to use a paper map or ask for directions.\n* **They may prefer the tactile experience of using a paper map.** Some people simply prefer the tactile experience of using a paper map. They may find it more satisfying to fold and unfold a map, and they may find it easier to visualize their route when they can see it laid out on paper.\n* **They may prefer to interact with other people.** When you ask for directions, you are interacting with another person. This can be a good way to meet new people and learn about the local culture.\n\nIn addition, there are some specific situations where using a paper map or asking for directions may be the best option:\n\n* **If you are hiking or camping.** If you are hiking or camping in an area with no cell phone reception, you will need to use a paper map or ask for directions from other hikers or campers.\n* **If you are driving in a remote area.** If you are driving in a remote area with no cell phone reception, you will need to use a paper map or ask for directions from other drivers.\n* **If you are traveling to a country with a different language.** If you are traveling to a country with a different language, you may not be able to use a smartphone or GPS device to get directions. In this case, you will need to ask for directions from someone who speaks your language.", "metadata": {}} -{"answer_id": "hPMvV6zL2C4qTP4mRmhJwG", "model_id": "bard:20230327", "question_id": 34, "text": "Here are some tips on how to determine if a person is genuinely interested in a conversation or simply being polite:\n\n* **Pay attention to their body language.** If they are leaning in towards you, making eye contact, and nodding their head, they are likely interested in what you have to say. If they are fidgeting, checking their phone, or looking around the room, they may not be as engaged.\n* **Listen to their questions.** If they are asking questions about you and your interests, they are likely interested in getting to know you. 
If they are only asking questions about the topic of the conversation, they may not be as engaged.\n* **Notice how they respond to your answers.** If they are attentively listening to your answers and asking follow-up questions, they are likely interested in what you have to say. If they are not responding to your answers or changing the topic of the conversation, they may not be as interested.\n* **Trust your gut.** If you feel like someone is not being genuine, they probably are not. If you feel like someone is genuinely interested in you and the conversation, they probably are.\n\nIt is important to remember that everyone is different and there is no one-size-fits-all answer to this question. However, by paying attention to these tips, you can get a good sense of whether someone is genuinely interested in a conversation or simply being polite.", "metadata": {}} -{"answer_id": "npWNeKceGyqCYaRpY4w54g", "model_id": "bard:20230327", "question_id": 35, "text": "There are many reasons why someone might prefer to shop at a small, locally-owned business instead of a large chain store, even if the prices are higher. Here are a few:\n\n* **Supporting the local economy.** When you shop at a small business, more of your money stays in your community. This helps to support local jobs, schools, and infrastructure.\n* **Getting unique products.** Small businesses are often more likely to carry unique or one-of-a-kind products that you won't find at a chain store.\n* **Getting better service.** Small businesses are often more customer-oriented than chain stores. Owners and employees are more likely to take the time to get to know you and your needs.\n* **Feeling good about supporting a local family or community.** When you shop at a small business, you're supporting the dreams of a local family or community. This can give you a sense of pride and satisfaction.\n\nOf course, there are also some downsides to shopping at small businesses. The prices may be higher, and the selection may be smaller. But for many people, the benefits outweigh the costs.", "metadata": {}} -{"answer_id": "WVuaK9m8Sedcws27tNu7Ev", "model_id": "bard:20230327", "question_id": 36, "text": "Here are some tips on how to assess the credibility of a source of information without relying solely on the reputation of the author or publisher:\n\n* **Check the source's credentials.** Does the author have any relevant experience or education? Is the publisher a reputable organization?\n* **Look for bias.** Does the source seem to be trying to push an agenda? Is it more likely to present one side of an issue than the other?\n* **Check the sources of the information.** Does the source cite any other sources? Are those sources credible?\n* **Look for errors.** Is there any evidence that the source has made any mistakes?\n* **Read the source critically.** Ask yourself if the information makes sense. Does it seem consistent with what you know about the topic?\n\nIt is important to remember that even the most reputable sources can make mistakes. It is always a good idea to do your own research and to corroborate information from multiple sources.", "metadata": {}} -{"answer_id": "HLtTf83Y5QRP4TxX6nw5TC", "model_id": "bard:20230327", "question_id": 37, "text": "There are a few reasons why people might enjoy being scared. One reason is that it can release endorphins, which are hormones that have mood-boosting effects. 
Endorphins can make us feel happy and euphoric, and they can also help to reduce stress and pain.\n\nAnother reason people might enjoy being scared is that it can provide a sense of excitement and adrenaline. When we are scared, our bodies release adrenaline, which can make us feel more alert and energized. This can be a pleasurable experience for some people.\n\nFinally, some people might enjoy being scared because it allows them to experience a sense of control over their fear. When we are scared, we often feel like we are at the mercy of our emotions. However, if we are able to face our fears and overcome them, it can give us a sense of empowerment and accomplishment.\n\nOn the other hand, some people avoid scary experiences because they find them to be unpleasant. They might be afraid of feeling scared, or they might worry that they will not be able to handle the fear. Others might simply not find the experience of being scared to be enjoyable.\n\nUltimately, whether or not someone enjoys being scared is a personal preference. There is no right or wrong answer, and people should do what makes them happy.", "metadata": {}} -{"answer_id": "Fmdtexq6QQNuoqZkZfDURY", "model_id": "bard:20230327", "question_id": 38, "text": "Observing the behavior of other people in a social situation can provide clues about cultural norms and expectations by:\n\n* **Identifying patterns of behavior.** If you see that people are all doing something in a certain way, it can be a sign that there is an unspoken rule about how to behave in that situation. For example, if you see that everyone in a restaurant is using their napkins to wipe their mouths, it can be a sign that there is an expectation that people will use their napkins to eat.\n* **Paying attention to how people react to violations of cultural norms.** If you see someone violate a cultural norm, take note of how other people react. Do they seem surprised? Angry? Upset? This can give you clues about how important the norm is to people in that culture. For example, if you see someone in a restaurant talking loudly on their phone, and other people around them seem annoyed, it can be a sign that people in that culture value quietness in restaurants.\n* **Talking to people about their culture.** If you are curious about cultural norms and expectations, you can always talk to people who are from that culture. They can tell you about the rules that they have learned and how they are expected to behave in different situations. For example, if you are visiting a country that has a different language than your own, you could talk to a native speaker of that language about how to greet people in their culture.\n\nObserving the behavior of other people in a social situation can be a helpful way to learn about cultural norms and expectations. By paying attention to patterns of behavior, how people react to violations of cultural norms, and talking to people about their culture, you can gain a deeper understanding of how people in different cultures behave.", "metadata": {}} -{"answer_id": "WxnC69jTMkyJvcqvMCgCwY", "model_id": "bard:20230327", "question_id": 39, "text": "There is no easy answer to this question, as it is a complex and multifaceted issue. 
There are many factors to consider, such as the potential benefits of space exploration, the potential costs, and the ethical implications of exploring space.\n\nSome of the potential benefits of space exploration include:\n\n* Expanding our knowledge of the universe and our place in it\n* Developing new technologies that can be used to solve Earth's problems\n* Creating new industries and jobs\n* Inspiring people and motivating them to pursue their dreams\n\nSome of the potential costs of space exploration include:\n\n* The financial costs of developing and launching spacecraft\n* The environmental costs of space debris\n* The risks to human life\n* The potential for conflict between nations as they compete for resources in space\n\nThe ethical implications of exploring space are also complex. Some people believe that it is our moral duty to explore space, as it is the next frontier for humanity. Others believe that we should focus on solving Earth's problems first, before we embark on a journey into the unknown.\n\nUltimately, the decision of whether or not to explore space is a personal one. There is no right or wrong answer, and each person must weigh the potential benefits and costs before making a decision.", "metadata": {}} -{"answer_id": "npZdTFPRqZfoqzt5YurYEL", "model_id": "bard:20230327", "question_id": 40, "text": "In a world where automation is becoming increasingly prevalent, it is important to prioritize both job creation and technological progress. Technological progress can lead to job creation by creating new industries and demand for new products and services. However, it can also lead to job destruction by automating tasks that were previously done by humans. It is important to find ways to mitigate the negative effects of automation on employment while also harnessing its potential to create new jobs.\n\nOne way to mitigate the negative effects of automation on employment is to invest in education and training so that workers can develop the skills they need to succeed in the new economy. Another way is to provide financial assistance to workers who are displaced by automation.\n\nIt is also important to create new industries and demand for new products and services that can be created by technological progress. This can be done by encouraging entrepreneurship and innovation. It is also important to invest in research and development so that new technologies can be developed that create new jobs.\n\nBy prioritizing both job creation and technological progress, we can ensure that automation has a positive impact on the economy and on society as a whole.", "metadata": {}} -{"answer_id": "iy9aa5sqFeNA2uPQZLpxEz", "model_id": "bard:20230327", "question_id": 41, "text": "The average human blinks 15-20 times per minute, 1,200 times per hour, 28,800 times per day, 10,512,000 times a year, and 518,017,359 times in their entire lifetime.\n\nWe blink to keep our eyes moist and clean, and to protect them from foreign objects. Blinking also helps us to focus our vision.\n\nThe number of times we blink per minute varies depending on a number of factors, including our age, our activity level, and the environment we are in. For example, we tend to blink more often when we are tired or when we are exposed to bright lights.\n\nThe number of times we blink per day also varies depending on our age. 
For example, infants blink more often than adults, and older adults blink less often than younger adults.\n\nThe number of times we blink per year is a constant number, regardless of our age or activity level.\n\nThe number of times we blink per lifetime is a large number, but it is not an infinite number. If we lived for 100 years, we would blink an average of 518,017,359 times.", "metadata": {}} -{"answer_id": "XAALo4GKWE3QNb7wbkWVNk", "model_id": "bard:20230327", "question_id": 42, "text": "There are approximately 10^18 atoms in a grain of salt. This number can be calculated by using the following steps:\n\n1. The atomic mass of sodium chloride (NaCl) is 58.44 g/mol.\n2. One mole contains Avogadro's number of atoms, which is 6.022 x 10^23 atoms.\n3. Therefore, the mass of one grain of salt is 58.44 x 10^-3 g.\n4. Dividing the mass of one grain of salt by the atomic mass of sodium chloride, we get the number of moles of sodium chloride in one grain of salt.\n5. Dividing the number of moles of sodium chloride by Avogadro's number, we get the number of atoms in one grain of salt.\n\nThis number is very large, and it can be difficult to comprehend. However, it is important to remember that atoms are very small, and that a grain of salt is also very small. The average grain of salt is about 0.05 mm in diameter, which is about the size of a grain of sand. Therefore, the number of atoms in a grain of salt is still relatively small compared to the size of the grain.", "metadata": {}} -{"answer_id": "XRWjLnJNyGNMfktTvWpbRV", "model_id": "bard:20230327", "question_id": 43, "text": "There are about 8.6 million lightning strikes on Earth each day. This is based on the average number of lightning strikes worldwide every second, which is 100. \n\nLightning strikes occur when there is an imbalance between the Earth's surface and the atmosphere. The Earth's surface is negatively charged, while the atmosphere is positively charged. When these two charges come into contact, they create an electrical discharge, which is lightning.\n\nLightning strikes are most common in areas with a lot of thunderstorms, such as the tropics. They can also occur in other areas, but they are less common.\n\nLightning strikes can be dangerous, and they can cause a lot of damage. They can start fires, injure people, and even kill people. It is important to be aware of the dangers of lightning strikes and to take precautions to stay safe.", "metadata": {}} -{"answer_id": "CHeyn9eR3u5eFq99UJ3pbB", "model_id": "bard:20230327", "question_id": 44, "text": "In the movie \"Up,\" the house is a typical American house that weighs about 10,000-25,000 kg. Helium balloons create about 10 g of lift each, so you would need at least 1,000,000 balloons to lift the house. In the film, we see an enormous collection of balloons above the house, perhaps 10 times the volume of the house. This would be about 10 million balloons, which is enough to lift the house.\n\nHowever, there are a few things to consider. First, the balloons would need to be very strong to withstand the weight of the house. Second, the house would need to be very light to be lifted by the balloons. Third, the balloons would need to be very buoyant to lift the house against the force of gravity.\n\nEven if all of these factors were met, it is still unlikely that a house could be lifted by balloons in the real world. 
The cost of the balloons, the time it would take to fill them, and the risk of them bursting would make it an impractical solution.", "metadata": {}} -{"answer_id": "kfoNRLCHFucfWcxFBPoJgP", "model_id": "bard:20230327", "question_id": 45, "text": "According to Domo, 15,220,700 texts are sent every minute of every day worldwide, not including app-to-app messaging. This means that in a single minute, more than 15 million text messages are sent around the globe.\n\nThere are a few reasons why this number is so high. First, texting is a convenient and efficient way to communicate. It allows people to send quick messages to multiple people at once, and it doesn't require the sender or receiver to be available at the same time. Second, texting is a relatively inexpensive way to communicate. Most carriers offer unlimited texting plans, so people can send as many messages as they want without having to worry about running out of minutes or data. Finally, texting is a popular way to communicate with friends and family. Many people prefer to text rather than call or email, because it is a more casual and informal way to communicate.\n\nThe number of text messages sent globally is expected to continue to grow in the coming years. This is due to a number of factors, including the increasing popularity of smartphones, the growing availability of high-speed internet, and the continued development of new text messaging features.", "metadata": {}} -{"answer_id": "A4sDEGXpWaNVA39uRsDNjB", "model_id": "bard:20230327", "question_id": 46, "text": "The number of words spoken daily on Earth is a difficult question to answer, as there are many factors that can affect the number of words spoken. However, there are some estimates that can be made.\n\nOne estimate is that the average person speaks about 7,000 words per day. This is based on a study that found that the average person speaks about 182,500 words per week. If we assume that the average person speaks for 5 days per week, then this would mean that they speak about 7,000 words per day.\n\nHowever, this is just an estimate, and the actual number of words spoken per day can vary greatly depending on the individual. Some people may speak more than 10,000 words per day, while others may speak less than 5,000 words per day.\n\nThere are a number of factors that can affect the number of words spoken per day. These include the person's occupation, their culture, and their personality. For example, people who work in sales or customer service may speak more words per day than people who work in jobs that are more solitary. People who live in cultures that are more talkative may also speak more words per day than people who live in cultures that are more reserved. And people who are more extroverted may also speak more words per day than people who are more introverted.\n\nSo, while the exact number of words spoken daily on Earth is unknown, it is estimated that the average person speaks about 7,000 words per day. This number can vary greatly depending on the individual, but it is a good starting point for estimating the number of words spoken on our planet.", "metadata": {}} -{"answer_id": "hUAc6BCs5NuY7tS62PPXbT", "model_id": "bard:20230327", "question_id": 47, "text": "There are an estimated 1 septillion snowflakes that fall each year. A septillion is a 1 with 24 zeros after it. To put that into perspective, there are only about 8 billion people on Earth. 
So, there are more snowflakes that fall in a single year than there are people on the entire planet!\n\nThe number of snowflakes that fall each year depends on a number of factors, including the amount of snowfall, the temperature, and the wind. In general, areas that receive a lot of snowfall will have more snowflakes falling each year. And, of course, areas that are colder will also have more snowflakes falling.\n\nSnowflakes are formed when water droplets in the atmosphere freeze. The water droplets need something to cling to in order to freeze, such as a dust particle or a piece of ice. Once the water droplets freeze, they start to grow and form snowflakes.\n\nThe shape of a snowflake depends on the temperature and the amount of water vapor in the atmosphere. If the temperature is very cold, the snowflakes will be very small and have a simple shape. If the temperature is warmer, the snowflakes will be larger and more complex.\n\nSnowflakes are a beautiful and fascinating part of winter. They are a reminder of the beauty of nature and the power of the cold.", "metadata": {}} -{"answer_id": "TMtjmb5tDizQ9RETRxpt2s", "model_id": "bard:20230327", "question_id": 48, "text": "The number of pages in all the books ever written is an impossible number to know with certainty. There are many factors that would affect the total, including the number of books that have been written, the average length of a book, and the number of books that have been lost or destroyed.\n\nHowever, we can make some educated guesses based on available data. For example, Google Books has indexed over 129 million books, and the average length of a book is 300 pages. This would mean that the total number of pages in all the books ever written is over 38 trillion.\n\nHowever, this number is likely an underestimate. First, it does not take into account self-published books, which are often longer than traditionally published books. Second, it does not take into account books that have been lost or destroyed.\n\nA more accurate estimate of the total number of pages in all the books ever written would be somewhere in the range of 100 trillion to 1 quadrillion pages. This is a truly staggering number, and it is difficult to imagine how many books it would take to fill such a library.\n\nBut even this number is just an estimate. The true number of pages in all the books ever written is likely much higher, and we will never know for sure.", "metadata": {}} -{"answer_id": "AwQMPhhaJ32ByA3VjKF5Ph", "model_id": "bard:20230327", "question_id": 49, "text": "The Earth has orbited the Sun 4.543 billion times since the beginning of life. This number is based on the age of the Earth, which is 4.543 billion years old, and the time it takes for the Earth to orbit the Sun, which is 365.2422 days.\n\nThe first life on Earth is thought to have arisen about 3.8 billion years ago. This means that the Earth has orbited the Sun about 1 billion times since the first life arose.\n\nThe Earth's orbit around the Sun is elliptical, which means that it is not a perfect circle. The Earth's orbit is closest to the Sun in January and farthest from the Sun in July. This difference in distance causes the Earth's seasons.\n\nThe Earth's orbit is also tilted relative to the Sun's equator. This tilt causes the Earth's seasons to change.\n\nThe Earth's orbit around the Sun is a complex process that has been shaped by the forces of gravity, inertia, and tidal forces. 
The Earth's orbit is constantly changing, but it has been remarkably stable for billions of years. This stability is essential for life on Earth.", "metadata": {}} -{"answer_id": "UCqxXfMAnqJKbApsjrPFcq", "model_id": "bard:20230327", "question_id": 50, "text": "There is no definitive answer to the question of how many songs have been recorded throughout history. However, there are a few ways to estimate this number.\n\nOne way is to look at the number of songs that have been registered with the United States Copyright Office. As of 2023, the US Copyright Office has registered over 97 million songs. However, this is likely an underestimate, as many songs are not registered with the Copyright Office.\n\nAnother way to estimate the number of songs that have been recorded is to look at the number of songs that are available for streaming on services like Spotify and Apple Music. As of 2023, Spotify has over 82 million songs in its catalog, and Apple Music has over 60 million songs. This suggests that there are at least 142 million songs that have been recorded and released to the public.\n\nHowever, this is still likely an underestimate, as many songs are not available for streaming on these services. For example, many songs are only available on physical media, such as vinyl records or CDs. Additionally, many songs are not released to the public, and are only available to be recorded by professional musicians.\n\nUltimately, the number of songs that have been recorded throughout history is likely in the billions. This is a staggering number, and it is a testament to the creativity and ingenuity of human beings.", "metadata": {}} -{"answer_id": "YMRg5Xi9BBvvqqny2tqJZ3", "model_id": "bard:20230327", "question_id": 51, "text": "If the Internet had been invented during the Renaissance period, it would have had a profound impact on the world. The Renaissance was a time of great intellectual and cultural growth, and the Internet would have accelerated this growth. It would have allowed people to share ideas more easily, and to collaborate on projects from all over the world. It would have also helped to spread education and knowledge, and to promote understanding between different cultures.\n\nThe Internet would have also had a significant impact on the arts. It would have allowed artists to share their work more easily, and to connect with other artists from all over the world. It would have also helped to promote the arts, and to make them more accessible to the public.\n\nOverall, the Internet would have had a positive impact on the world if it had been invented during the Renaissance period. It would have accelerated the growth of knowledge and understanding, and it would have promoted the arts and culture.", "metadata": {}} -{"answer_id": "fhr7iMPb9SVJ663mXKBuct", "model_id": "bard:20230327", "question_id": 52, "text": "If the Aztecs had successfully repelled the Spanish conquistadors, the course of history in the Americas would have been very different. The Aztecs were a powerful empire with a strong military, and their defeat by the Spanish was a major blow to the indigenous peoples of the Americas. If the Aztecs had won, they might have been able to resist the further expansion of European colonialism, and the Americas might have developed in a more independent and indigenous-centered way.\n\nHowever, it is important to note that the Aztecs were not immune to disease, and they were also facing internal political and economic problems. 
Even if they had defeated the Spanish, it is possible that their empire would have collapsed in the years to come.\n\nUltimately, the question of what would have happened if the Aztecs had successfully repelled the Spanish is a matter of speculation. However, it is clear that their defeat had a profound impact on the course of history in the Americas.", "metadata": {}} -{"answer_id": "4rrmyZw9zhyJGqdrcazFvt", "model_id": "bard:20230327", "question_id": 53, "text": "The Black Death was a devastating event that had a profound impact on the course of European history. It is impossible to say for certain what would have happened if the Black Death had not occurred, but it is likely that Europe would have been a very different place.\n\nOne of the most significant effects of the Black Death was the decline of the population. Millions of people died from the plague, and many more were left weakened or orphaned. This led to a labor shortage, which had a major impact on the economy.\n\nThe Black Death also had a profound impact on society. The deaths of so many people led to a sense of despair and hopelessness. Many people turned to religion for comfort, and there was a rise in superstition and fear.\n\nThe Black Death also led to changes in the way people lived. People began to avoid crowded areas, and they took steps to prevent the spread of the plague, such as burning the bodies of the dead and avoiding contact with people who were sick.\n\nIt is impossible to say for certain what would have happened if the Black Death had not occurred, but it is clear that it had a profound impact on the course of European history.", "metadata": {}} -{"answer_id": "Za3mY9xwxpZdPmGW48wtzu", "model_id": "bard:20230327", "question_id": 54, "text": "If Isaac Newton had focused on biology instead of physics, he might have made some important discoveries in the field of medicine. He could have studied the mechanics of the human body and how it moves, or he could have investigated the causes of disease. He might have even developed new treatments or cures for illnesses.\n\nHowever, it is also possible that Newton would not have made any significant discoveries in biology. He might have been too focused on physics to devote himself to another field. Additionally, biology is a much more complex field than physics, and Newton might not have had the same level of success in it.\n\nUltimately, it is impossible to say for sure what would have happened if Newton had focused on biology instead of physics. However, it is clear that he was a brilliant scientist with a lot to offer both fields.", "metadata": {}} -{"answer_id": "cbAaJS9ULjR4XYSHATujSG", "model_id": "bard:20230327", "question_id": 55, "text": "It is impossible to say for certain what the world would be like without the Beatles, but it is clear that they had a profound impact on popular culture. The band's music and style were hugely influential, and their success helped to popularize rock and roll music around the world.\n\nIf the Beatles had never formed, it is possible that another band would have taken their place and become just as popular. However, it is also possible that the world would have been a very different place. The Beatles' music was often seen as a symbol of hope and optimism, and their message of peace and love resonated with many people. Without them, the world might be a darker and more cynical place.\n\nIt is also worth noting that the Beatles were not just a band. They were also cultural icons who helped to shape the way we view the world. 
Their fashion, their haircuts, and their language all became part of the fabric of popular culture. Without them, the world might be a more boring place.\n\nUltimately, it is impossible to say for certain what the world would be like without the Beatles. However, it is clear that they had a profound impact on popular culture, and their absence would be felt by many people.", "metadata": {}} -{"answer_id": "ZEgb9fvopGo7HF5wPeoeHs", "model_id": "bard:20230327", "question_id": 56, "text": "If Alan Turing had not cracked the Enigma code during World War II, the Allies would have been at a significant disadvantage. The Enigma code was used by the German military to communicate securely, and it was very difficult to break. Turing's work was essential to the Allied victory, and it is likely that the war would have lasted much longer and resulted in more casualties if he had not been able to break the code.\n\nTuring's work at Bletchley Park was also important because it helped the Allies to understand the German military's plans and intentions. This information helped the Allies to make better decisions about where to deploy their troops and how to use their resources.\n\nOverall, Turing's work on the Enigma code was essential to the Allied victory in World War II. If he had not been able to break the code, the war would have lasted much longer and resulted in more casualties.", "metadata": {}} -{"answer_id": "igMXoEiszFM65ZS2KUTvtm", "model_id": "bard:20230327", "question_id": 57, "text": "If the Suez Canal had never been constructed, it would have had a significant impact on the global economy. The canal is a major shipping route, and its absence would have forced ships to take a much longer and more expensive route around Africa. This would have made goods more expensive and slower to arrive.\n\nThe Suez Canal is also a major source of income for Egypt. The government collects tolls from ships that use the canal, and this money is used to fund important projects such as schools, hospitals, and infrastructure. If the canal had never been built, Egypt would have lost a significant source of income.\n\nThe Suez Canal is also a major symbol of Egyptian pride. The canal was built by Egyptian workers, and it is a source of national pride. If the canal had never been built, it would have been a major blow to Egyptian morale.\n\nOverall, the absence of the Suez Canal would have had a significant impact on the global economy and on Egypt. The canal is a major shipping route, a source of income for Egypt, and a symbol of Egyptian pride.", "metadata": {}} -{"answer_id": "Up4h8RpgVVafBtUj4tiGPZ", "model_id": "bard:20230327", "question_id": 58, "text": "If the Maya civilization had never mysteriously collapsed, it is likely that they would have continued to thrive and develop. They were a highly advanced civilization with a sophisticated understanding of mathematics, astronomy, and engineering. They also had a rich culture and a strong sense of community.\n\nIf the Maya had not collapsed, they would have likely continued to expand their territory and influence. They might have even become a global power. Their advanced technology and culture would have had a profound impact on the world.\n\nHowever, it is also possible that the Maya would have eventually collapsed even if they had not experienced the sudden and mysterious decline that they did. They were a very complex civilization with a lot of interconnected parts. 
If one part of the system collapsed, it could have had a cascading effect on the rest of the civilization.\n\nUltimately, it is impossible to say for sure what would have happened if the Maya civilization had never collapsed. However, it is clear that they were a very advanced and influential civilization. If they had not collapsed, they would have had a profound impact on the world.", "metadata": {}} -{"answer_id": "CMg3ypKyykGUzQsDmX7i35", "model_id": "bard:20230327", "question_id": 59, "text": "If Christopher Columbus had not discovered the Americas, it is likely that other European explorers would have eventually found the continent. However, the timing of Columbus's voyage was important, as it coincided with the beginning of the Age of Exploration. Columbus's discovery helped to spur further exploration and colonization of the Americas, which had a profound impact on the history of the world.\n\nIf Columbus had not discovered the Americas, it is possible that the continent would have remained largely unknown to Europeans for centuries. This would have delayed the development of trade and communication between Europe and the Americas, and it would have also delayed the spread of European diseases to the Americas.\n\nIt is also possible that the Americas would have been settled by other peoples, such as the Chinese or the Japanese. These civilizations had the technology and the resources to reach the Americas, and they were also interested in exploring new lands.\n\nUltimately, it is impossible to say for sure what would have happened if Christopher Columbus had not discovered the Americas. However, it is clear that his discovery had a profound impact on the history of the world.", "metadata": {}} -{"answer_id": "WhLiJXznzRBkzxDdnc5ndX", "model_id": "bard:20230327", "question_id": 60, "text": "If Vincent van Gogh had been a successful artist during his lifetime, he would have likely sold more paintings and had a greater impact on the art world. He might have been able to support himself financially and pursue his art career without having to rely on his brother Theo for financial support. He might have also been able to travel more and experience more of the world, which would have influenced his art. \r\n\r\nHowever, it is also possible that Van Gogh's tragic story is part of the appeal of his art. His paintings are often seen as a window into his tortured soul and his passionate love of nature. If he had been successful during his lifetime, he might not have been able to create such powerful and moving works of art. \r\n\r\nUltimately, it is impossible to say for sure what would have happened if Vincent van Gogh had been a successful artist during his lifetime. However, it is clear that his tragic story has not diminished his impact on the art world. His paintings continue to be admired and loved by people all over the world.", "metadata": {}} -{"answer_id": "Vzmhr5RMndkR866p4pUef6", "model_id": "bard:20230327", "question_id": 61, "text": "It looks like you want my help with coding, but I'm not trained to do that, yet. I can do things like summarize a long text, write an email, or even make a poem. 
Is there something else you would like to try?", "metadata": {}} -{"answer_id": "jPrfhv9oaZGrkXxDUw5Vbt", "model_id": "bard:20230327", "question_id": 62, "text": "Sure, here is a Python function to find the longest common subsequence of two input strings using dynamic programming:\n\n```python\ndef lcs(s1, s2):\n m = len(s1)\n n = len(s2)\n dp = [[0] * (n + 1) for i in range(m + 1)]\n\n for i in range(1, m + 1):\n for j in range(1, n + 1):\n if s1[i - 1] == s2[j - 1]:\n dp[i][j] = dp[i - 1][j - 1] + 1\n else:\n dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])\n\n return dp[m][n]\n```\n\nThis function uses dynamic programming to solve the problem. The idea is to first fill a table with the lengths of the longest common subsequences of all possible subsequences of the two strings. Then, we can find the longest common subsequence of the two strings by looking at the last element of the table.\n\nThe time complexity of this algorithm is O(nm), where m and n are the lengths of the two strings. The space complexity is O(mn).", "metadata": {}} -{"answer_id": "KdZNrf6udTfCxGGcQxRbXk", "model_id": "bard:20230327", "question_id": 63, "text": "Sure, here is a regular expression in Python to validate an email address:\n\n```python\nimport re\n\nregex = '^[a-zA-Z0-9]+[\\._]?[ a-zA-Z0-9]+[@]\\w+[. ]\\w{2,3}$'\n\ndef check(email):\n if re.search(regex, email):\n return True\n else:\n return False\n```\n\nThis regular expression matches the following format:\n\n* A username, which can contain letters, numbers, and underscores.\n* The @ symbol\n* A domain name, which can contain letters, numbers, and dots.\n* The top-level domain, which must be 2-3 characters long.\n\nThis regular expression does not validate the following:\n\n* The existence of the email server.\n* The validity of the domain name.\n* The availability of the username.\n\nIf you want to validate all of these things, you will need to use a different method.", "metadata": {}} -{"answer_id": "AFzqAfbYVL25DBqFA2cv4G", "model_id": "bard:20230327", "question_id": 64, "text": "Here is a Python program to find the nth Fibonacci number using dynamic programming:\n\n```python\ndef fib(n):\n\n # Create a table of size n to store the Fibonacci numbers\n dp = [0] * n\n\n # Fill the first two elements of the table\n dp[0] = 0\n dp[1] = 1\n\n # Fill the remaining elements of the table\n for i in range(2, n):\n dp[i] = dp[i-1] + dp[i-2]\n\n # Return the nth Fibonacci number\n return dp[n-1]\n\n# Driver code\nn = int(input(\"Enter the nth Fibonacci number: \"))\nprint(\"The nth Fibonacci number is\", fib(n))\n```\n\nThis program uses dynamic programming to solve the Fibonacci sequence problem. Dynamic programming is a technique that can be used to solve problems that have overlapping subproblems. In this case, the subproblems are the Fibonacci numbers. The program stores the results of previously solved subproblems in a table, and then uses that table to calculate the nth Fibonacci number.", "metadata": {}} -{"answer_id": "Lt56JzWfXBxbvxrLf4fn3t", "model_id": "bard:20230327", "question_id": 65, "text": "Sure, here is a binary search algorithm to find a specific element in a sorted array:\n\n```python\ndef binary_search(array, low, high, x):\n if high >= low:\n mid = (high + low) // 2\n if array[mid] == x:\n return mid\n elif array[mid] < x:\n low = mid + 1\n else:\n high = mid - 1\n else:\n return -1\n```\n\nThis algorithm works by repeatedly dividing the search interval in half until the target value is found. 
The algorithm starts by comparing the target value to the middle element of the array. If they are not equal, the half in which the target cannot lie is eliminated and the search continues on the remaining half, again taking the middle element to compare to the target value, and repeating this until the target value is found. If the search ends with the remaining half being empty, the target is not in the array.\n\nThe time complexity of binary search is O(log n), where n is the number of elements in the array. This means that the algorithm takes logarithmic time to find the target value, even if the array is very large.", "metadata": {}} -{"answer_id": "bmaShWUyEqPE6kbHD6FjLk", "model_id": "bard:20230327", "question_id": 66, "text": "Here is an implementation of a queue data structure using two stacks in Python:\n\n```python\nclass Queue:\n\n def __init__(self):\n self.in_stack = []\n self.out_stack = []\n\n def enqueue(self, element):\n self.in_stack.append(element)\n\n def dequeue(self):\n if len(self.out_stack) == 0:\n while len(self.in_stack) > 0:\n self.out_stack.append(self.in_stack.pop())\n return self.out_stack.pop()\n\n def __len__(self):\n return len(self.in_stack) + len(self.out_stack)\n\n def __iter__(self):\n return iter(self.in_stack)\n\n def __contains__(self, element):\n return element in self.in_stack\n```\n\nThis implementation uses two stacks, an `in_stack` and an `out_stack`. The `in_stack` is used to store elements that are waiting to be processed, and the `out_stack` is used to store elements that have been processed. When an element is enqueued, it is added to the `in_stack`. When an element is dequeued, it is removed from the `out_stack`. If the `out_stack` is empty, the elements in the `in_stack` are popped into the `out_stack` and the `in_stack` is emptied.\n\nThis implementation of a queue data structure using two stacks is efficient, as it uses only two data structures. It is also easy to implement and maintain.", "metadata": {}} -{"answer_id": "hiSVom5YqSrKZKor8CjHdU", "model_id": "bard:20230327", "question_id": 67, "text": "Sure, here is a program to find the common elements in two arrays without using any extra data structures:\n\n```python\ndef common_elements(array1, array2):\n \"\"\"Finds the common elements in two arrays.\n\n Args:\n array1 (list): The first array.\n array2 (list): The second array.\n\n Returns:\n set: The set of common elements.\n \"\"\"\n # Initialize the set of common elements.\n common_elements = set()\n\n # Iterate through the first array.\n for element in array1:\n # Iterate through the second array.\n for element2 in array2:\n # If the elements are equal, add them to the set of common elements.\n if element == element2:\n common_elements.add(element)\n\n # Return the set of common elements.\n return common_elements\n```\n\nThis program uses a simple algorithm to find the common elements in two arrays. The algorithm first initializes a set of common elements. Then, it iterates through the first array and compares each element to each element in the second array. If an element is found in both arrays, it is added to the set of common elements. The algorithm then returns the set of common elements.\n\nThis algorithm is efficient, as it does not require any extra data structures. 
However, it can be slow for large arrays, as it has to compare all of the elements in the two arrays.", "metadata": {}} -{"answer_id": "JRHeURg4TrVAgWNp5yqjFx", "model_id": "bard:20230327", "question_id": 68, "text": "To find the value of f(2), we simply substitute x=2 into the equation:\n\nf(2) = 5(2)^3 - 2(2) + 3 = 40 - 4 + 3 = 39\n\nTherefore, the value of f(2) is 39.", "metadata": {}} -{"answer_id": "hZwFKeRYBLKihBEWSEGMYJ", "model_id": "bard:20230327", "question_id": 69, "text": "Distribute the 5 on the right-hand side of the equation:\n\n3x + 10 = 5x - 10\n\nCombine like terms:\n\n-2x = -10\n\nDivide both sides by -2:\n\nx = 5\n\nTherefore, the solution to the equation is $x=5$.", "metadata": {}} -{"answer_id": "iG4SJq9xCKCDDtiqU3szJZ", "model_id": "bard:20230327", "question_id": 70, "text": "The length of a line segment is the distance between its endpoints. The distance formula is:\n\n$$d = \\sqrt{(x_2 - x_1)^2 + (y_2 - y_1)^2}$$\n\nwhere $(x_1, y_1)$ and $(x_2, y_2)$ are the coordinates of the endpoints. In this case, $(x_1, y_1) = (2, -2)$ and $(x_2, y_2) = (10, 4)$. Substituting these values into the distance formula, we get:\n\n$$d = \\sqrt{(10 - 2)^2 + (4 - (-2))^2} = \\sqrt{8^2 + 6^2} = \\sqrt{100} = 10$$\n\nTherefore, the length of the line segment is 10 units.", "metadata": {}} -{"answer_id": "CjhV8cJ6UEp7Y4f3eWB6E5", "model_id": "bard:20230327", "question_id": 71, "text": "Sure, here is a sample email that you can use as a template for your own joint venture proposal:\n\nDear [Potential Partner Name],\n\nI am writing to you today to propose a joint venture between our two companies. Our two companies have been working together for many years, and we have a proven track record of success. I believe that a joint venture between our two companies would be a mutually beneficial arrangement.\n\nThe purpose of this joint venture would be to develop a new product or service that would be marketed to a specific market. We would each contribute our own unique skills and resources to the project, and we would share the profits from the venture equally.\n\nI believe that this joint venture would be a success for several reasons. First, our two companies have a strong track record of success. We have both been in business for many years, and we have both developed a loyal customer base. Second, our two companies have complementary skills and resources. We each have our own strengths and weaknesses, and a joint venture would allow us to combine our strengths to create something even greater. Finally, I believe that the market for this new product or service is growing rapidly. There is a lot of demand for this type of product or service, and I believe that we can capture a significant share of the market.\n\nI would like to meet with you to discuss this proposal in more detail. I am available to meet at your convenience. Please let me know if you are interested in discussing this further.\n\nThank you for your time and consideration.\n\nSincerely,\n[Your Name]", "metadata": {}} -{"answer_id": "5waLH9pZsr8jVTpy67QH58", "model_id": "bard:20230327", "question_id": 72, "text": "Dear [Boss's name],\n\nI am writing to inform you of my decision to resign from my position as [Your job title] at [Company name]. My last day of employment will be [Date].\n\nI have accepted a position at another company that will allow me to [Reason for leaving]. I have enjoyed my time at [Company name] and I am grateful for the opportunities I have been given. 
I have learned a great deal and I am confident that the skills I have developed will be valuable in my new role.\n\nI would like to thank you for your support and guidance during my time at [Company name]. I wish you and the company all the best in the future.\n\nSincerely,\n[Your name]", "metadata": {}} -{"answer_id": "ZV4sUNujQGS2w7Wg4jbXbf", "model_id": "bard:20230327", "question_id": 73, "text": "Dear Admissions Committee,\n\nI am writing to recommend [student name] for admission to your prestigious graduate program in computer science. I have known [student name] for [number] years, first as their undergraduate advisor at [university name] and now as their research advisor at [university name].\n\n[Student name] is an outstanding student with a strong academic record. They have consistently earned top grades in their coursework, and they have a deep understanding of the theoretical and practical foundations of computer science. [Student name] is also an accomplished researcher. They have worked on several projects that have been published in top academic journals, and they have presented their work at several prestigious conferences.\n\nIn addition to their academic and research accomplishments, [student name] is also a well-rounded individual with a strong work ethic and a commitment to service. They are a member of several student organizations, and they volunteer their time at a local homeless shelter.\n\nI have no doubt that [student name] would be an asset to your graduate program. They are a highly intelligent and motivated individual with a strong work ethic and a commitment to excellence. I highly recommend [student name] for admission to your program.\n\nSincerely,\n[Your name]", "metadata": {}} -{"answer_id": "X94xm9m7tayWsKFNACqAG4", "model_id": "bard:20230327", "question_id": 74, "text": "Subject: Introducing our new software solution: [Product Name]\n\nBody:\n\nHi [Customer Name],\n\nWe're excited to announce the launch of our new software solution, [Product Name]. [Product Name] is a revolutionary new tool that will help you [solve a problem or improve a process].\n\nWith [Product Name], you can [list of benefits].\n\nWe've been working hard to make [Product Name] the best it can be, and we're confident that you'll love it.\n\nTo learn more about [Product Name] and to sign up for a free trial, please visit our website at [website address].\n\nWe can't wait to hear what you think!\n\nThanks,\n[Your Name]", "metadata": {}} -{"answer_id": "EFL7fU6gnicQY6DRpHvbTK", "model_id": "bard:20230327", "question_id": 75, "text": "Dear [Customer Name],\n\nI am writing to apologize for the delay in your order. I know that you were expecting your order to arrive on [date], and I am truly sorry that it did not.\n\nThere was an issue with our shipping system that caused your order to be delayed. We have since fixed the issue and your order is now on its way to you. You should receive it within the next few days.\n\nI sincerely apologize for the inconvenience this has caused you. 
We value your business and we are committed to providing you with the best possible service.\n\nIf you have any questions or concerns, please do not hesitate to contact me.\n\nSincerely,\n[Your Name]", "metadata": {}} -{"answer_id": "XJLFjCr6vgQZjEXbaMjTa6", "model_id": "bard:20230327", "question_id": 76, "text": "Sure, here is a script for a YouTube video exploring the history and cultural significance of jazz:\n\n* * *\n\n**Intro**\n\nJazz is a music genre that originated in the African-American communities of New Orleans, Louisiana, in the late 19th and early 20th centuries. Since the 1920s Jazz Age, it has been recognized as a major form of musical expression in traditional and popular music.\n\nJazz is characterized by swing and blue notes, complex chords, call and response vocals, polyrhythms and improvisation. Jazz has roots in European harmony and African rhythmic rituals.\n\n**History of Jazz**\n\nJazz has its roots in the blues, which was a form of music that originated in the American South in the late 19th century. The blues was a way for African Americans to express their pain and suffering under slavery and segregation.\n\nJazz also has its roots in ragtime, which was a form of music that originated in the United States in the late 19th century. Ragtime was a fast-paced, syncopated style of music that was often played on the piano.\n\nJazz emerged in New Orleans in the early 20th century. The city was a melting pot of cultures, and jazz was a way for people from different backgrounds to come together and express themselves.\n\nJazz quickly spread to other parts of the United States and the world. In the 1920s, jazz became popular in Europe, and in the 1930s, it became popular in Latin America.\n\n**Cultural Significance of Jazz**\n\nJazz has had a profound impact on American culture. It has been used in film, television, and advertising. It has also been used in political protests and social movements.\n\nJazz has also been a major influence on other genres of music, such as rock and roll, hip hop, and soul music.\n\n**Conclusion**\n\nJazz is a living, breathing art form that has been around for over a century. It has evolved over time, but its core elements remain the same: swing, blue notes, complex chords, call and response vocals, polyrhythms, and improvisation.\n\nJazz is a music that is constantly being reinvented by new musicians. It is a music that is constantly being reinterpreted by new listeners. And it is a music that is constantly being celebrated by new generations of fans.\n\n* * *", "metadata": {}} -{"answer_id": "bz3rS9MRs9QmaLGg4A8a7B", "model_id": "bard:20230327", "question_id": 77, "text": "**Hawaii: A Cultural Experience of a Lifetime**\n\nHawaii is a state of the United States that is known for its beautiful beaches, lush rainforests, and stunning volcanoes. But what many people don't know is that Hawaii is also home to a rich and vibrant culture.\n\nIf you're planning a trip to Hawaii, be sure to add some cultural experiences to your itinerary. Here are a few suggestions:\n\n* Visit a luau: A luau is a traditional Hawaiian feast that includes food, music, and dancing. It's a great way to experience Hawaiian culture and cuisine.\n* Take a hula class: Hula is a traditional Hawaiian dance that tells stories through movement. It's a beautiful and graceful art form that anyone can learn.\n* Visit a museum: Hawaii has several museums that showcase the state's unique history and culture. 
These museums are a great place to learn about Hawaiian culture and history.\n* Go on a hike: Hawaii is home to some of the most beautiful hiking trails in the world. These hikes are a great way to experience the natural beauty of the islands.\n* Go snorkeling or scuba diving: The waters around Hawaii are home to a variety of marine life. Snorkeling or scuba diving is a great way to see this marine life up close.\n* Visit a coffee farm: Hawaii is home to some of the best coffee in the world. Visiting a coffee farm is a great way to learn about the coffee-making process and sample some of the local coffee.\n\nThese are just a few of the many cultural experiences that you can have in Hawaii. If you're looking for a truly unique and memorable vacation, be sure to add some cultural experiences to your itinerary.\n\nHere are some must-see attractions in Hawaii:\n\n* Haleakala National Park: Haleakala National Park is home to the dormant Haleakala volcano. The park is a great place to see the sunrise and hike through the rainforest.\n* The North Shore: The North Shore of Oahu is home to some of the most beautiful beaches in the world. The North Shore is also a great place to go surfing.\n* The Big Island of Hawaii: The Big Island of Hawaii is the largest of the Hawaiian Islands. The island is home to volcanoes, rainforests, and beaches.\n* Kauai: Kauai is the fourth largest of the Hawaiian Islands. Kauai is known for its beautiful scenery, including the Na Pali Coast.\n* Maui: Maui is the second largest of the Hawaiian Islands. Maui is known for its beaches, volcanoes, and rainforests.\n\nNo matter what you're interested in, Hawaii has something to offer. Whether you're looking for a relaxing beach vacation or an adventure-filled journey, Hawaii is the perfect destination.", "metadata": {}} -{"answer_id": "kCS2ZqzKTdDpaXwu3TiFXP", "model_id": "bard:20230327", "question_id": 78, "text": "Denis Villeneuve's Dune is a visually stunning and epic sci-fi film that is sure to please fans of the genre. The film is based on the classic novel of the same name by Frank Herbert, and it tells the story of Paul Atreides, a young man who is destined to become the leader of the Fremen, a desert people who have adapted to life on the harsh planet of Arrakis.\n\nThe film's plot is complex and well-developed, and the characters are well-acted and believable. Timoth\u00e9e Chalamet gives a particularly strong performance as Paul, and he is ably supported by a talented cast that includes Rebecca Ferguson, Oscar Isaac, Josh Brolin, Zendaya, and Jason Momoa.\n\nThe film's special effects are simply stunning. The sandworms of Arrakis are particularly impressive, and the film's desert scenes are incredibly realistic. The film's score by Hans Zimmer is also excellent, and it helps to create a truly immersive experience.\n\nOverall, Dune is a must-see for fans of sci-fi and epic films. It is a visually stunning and well-crafted film that is sure to please audiences.\n\nHere are some specific examples of the film's special effects:\n\n* The sandworms of Arrakis are incredibly realistic. They are massive creatures that can swallow entire buildings, and they are a terrifying sight to behold.\n* The desert scenes of Arrakis are incredibly realistic. The film's director, Denis Villeneuve, went to great lengths to capture the harsh beauty of the desert, and the results are stunning.\n* The film's score by Hans Zimmer is excellent. 
It helps to create a truly immersive experience and helps to set the tone for the film's epic scope.", "metadata": {}} -{"answer_id": "7WWWS5UhWcVzkhTypqcFU4", "model_id": "bard:20230327", "question_id": 79, "text": "Sure, here is a possible podcast script for an episode discussing the influence of streaming platforms on the music industry:\n\n**Intro:**\n\nHello and welcome to the podcast! In today's episode, we're going to be discussing the influence of streaming platforms on the music industry. Streaming platforms have become increasingly popular in recent years, and they've had a major impact on how people listen to music. In this episode, we'll discuss the pros and cons of streaming platforms, and how they're changing the music industry.\n\n**Body:**\n\nOne of the biggest benefits of streaming platforms is that they give people access to a wide variety of music. In the past, people were limited to the music that was available in their local stores or on the radio. But with streaming platforms, people can listen to anything they want, whenever they want. This has led to a more diverse and eclectic music scene, as people are exposed to new artists and genres that they might not have heard otherwise.\n\nAnother benefit of streaming platforms is that they're very convenient. You can listen to music on your phone, your computer, or your TV. You don't have to worry about carrying around CDs or MP3 players, and you can easily create playlists of your favorite songs. This convenience has made streaming platforms very popular with people of all ages.\n\nHowever, there are also some drawbacks to streaming platforms. One of the biggest problems is that they don't pay artists very much money. When you listen to a song on a streaming platform, the artist only gets a fraction of a penny. This makes it difficult for artists to make a living from their music, and it's one of the reasons why many artists are unhappy with streaming platforms.\n\nAnother problem with streaming platforms is that they can be addictive. It's easy to spend hours scrolling through your favorite songs, and this can lead to problems like sleep deprivation and anxiety. If you find yourself spending too much time on streaming platforms, it's important to take a step back and find other ways to relax and de-stress.\n\n**Conclusion:**\n\nOverall, streaming platforms have had a major impact on the music industry. They've made it easier for people to listen to music, but they've also made it more difficult for artists to make a living. It's important to be aware of the pros and cons of streaming platforms, so that you can make informed decisions about how you use them.\n\n**Outro:**\n\nThanks for listening to this episode of the podcast! We hope you found it informative and helpful. If you have any questions or comments, please feel free to leave them below. We'll see you next time!", "metadata": {}} -{"answer_id": "QcSFqbgqLWqn96V6BvWT7B", "model_id": "bard:20230327", "question_id": 80, "text": "The Boston Symphony Orchestra gave a stunning performance of Beethoven's Symphony No. 9 in D minor, Op. 125, \"Choral,\" on Saturday night at Symphony Hall. The orchestra was in top form, and the performance was a thrilling and moving experience for the audience.\n\nThe BSO opened the concert with a lively performance of Beethoven's Symphony No. 5 in C minor, Op. 67. 
The orchestra played with great energy and precision, and the performance was a perfect way to set the stage for the evening's main event.\n\nAfter intermission, the BSO launched into a powerful and moving performance of Beethoven's Symphony No. 9. The orchestra played with great passion and conviction, and the performance was a truly transcendent experience. The audience was on its feet at the end of the performance, cheering and applauding for minutes on end.\n\nThe BSO's performance of Beethoven's Symphony No. 9 was a truly memorable experience. The orchestra played with great skill and artistry, and the performance was a thrilling and moving experience for the audience. If you have the opportunity to see the BSO in concert, I highly recommend it.\n\nThe overall audience experience was excellent. The hall was packed with people, and the atmosphere was electric. The audience was clearly excited to be there, and they responded enthusiastically to the orchestra's performance. The concert was a great example of the power of music to bring people together and create a shared experience.", "metadata": {}} diff --git a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_gpt35.jsonl b/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_gpt35.jsonl deleted file mode 100644 index 915947ff6..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_gpt35.jsonl +++ /dev/null @@ -1,80 +0,0 @@ -{"answer_id": "BZGowHM7L3RvtWRktKZjLT", "model_id": "gpt-3.5-turbo:20230327", "question_id": 1, "text": "Here are some tips to improve your time management skills:\n\n1. Create a schedule: Make a to-do list for the day, week or month and prioritize tasks by importance and deadline.\n\n2. Set realistic goals: Be realistic about what you can accomplish in a given amount of time. Set goals that can be achieved but are also challenging.\n\n3. Minimize distractions: Minimize distractions such as emails, phone calls, and social media notifications. Turn off notifications or use apps that block these distractions.\n\n4. Take breaks: Taking a break can help you recharge and increase productivity. Take a break every hour or so to stretch, go for a walk, or listen to some music.\n\n5. Learn to say 'no': Learn to say 'no' to tasks and requests that are not a priority or are not within your capabilities.\n\n6. Delegate tasks: If possible, delegate tasks to others who are capable of completing them. This will help to reduce your workload.\n\n7. Prioritize self-care: Make sure to take care of yourself by prioritizing self-care activities such as exercise, meditation or spending time with friends and family.\n\nRemember, time management is a skill that takes time and practice to develop.", "metadata": {}} -{"answer_id": "GsrPLfiNEcRDYAC5aUoY23", "model_id": "gpt-3.5-turbo:20230327", "question_id": 2, "text": "Here are some effective ways to deal with stress:\n\n1. Exercise regularly: Physical activity can help reduce stress and improve mood.\n\n2. Practice relaxation techniques: Deep breathing, meditation, and yoga are some effective ways to relax and reduce stress levels.\n\n3. Get enough sleep: Lack of sleep can increase stress levels. Try to get at least 7-8 hours of sleep each night.\n\n4. Eat a healthy diet: A well-balanced diet can help reduce stress levels and improve overall health.\n\n5. Connect with others: Spending time with friends and family, joining a support group, or participating in social activities can help reduce stress levels and improve mood.\n\n6. 
Time management: Prioritizing tasks and creating a schedule can help reduce stress and increase productivity.\n\n7. Seek professional help: If stress is impacting your daily life, consider seeking professional help such as therapy or counseling.", "metadata": {}} -{"answer_id": "5SGfexmPoJTWoVbPMB87KB", "model_id": "gpt-3.5-turbo:20230327", "question_id": 3, "text": "Python and JavaScript are both popular programming languages, but they differ in a number of ways:\n\n1. Programming paradigm: Python is primarily an object-oriented programming (OOP) language, while JavaScript is a multi-paradigm language that supports both functional and OOP styles.\n\n2. Syntax: Python uses indentation (whitespace) to indicate the structure of the code, while JavaScript uses curly braces.\n\n3. Typing: Python is a dynamically typed language, which means that the type of a variable is determined at runtime, while JavaScript is both dynamically and weakly typed, meaning that variable types can change at runtime and type errors may not be caught immediately.\n\n4. Application areas: Python is often used for scientific computing, data analysis, and machine learning, while JavaScript is most commonly used for web development and client-side scripting.\n\nOverall, both Python and JavaScript have their own strengths and weaknesses, and which language to use largely depends on the specific requirements of the project.", "metadata": {}} -{"answer_id": "RcqQg364kgCkYPshqvmGSp", "model_id": "gpt-3.5-turbo:20230327", "question_id": 4, "text": "Here are some tips that can help you increase your productivity while working from home:\n\n1. Create a dedicated workspace: Pick a place where you can work undisturbed and create an environment that resembles your office.\n\n2. Set a schedule: Establish a specific working schedule and stick to it as much as possible.\n\n3. Take breaks: Take short breaks throughout the day to recharge and refocus your energy.\n\n4. Eliminate distractions: Turn off your phone notifications and avoid any unnecessary distractions.\n\n5. Make a to-do list: Prioritize your tasks in order of importance and create a to-do list.\n\n6. Get dressed: Dressing as you would for work helps put you in the right mindset for productivity.\n\n7. Challenge yourself: Set small and achievable goals and push yourself to complete them.\n\n8. Stay positive: Maintain a positive attitude towards your work, and try to avoid negativity.\n\nWith these tips, you can stay productive and enjoy the benefits of working from home.", "metadata": {}} -{"answer_id": "3R9jAndzLHQqJwmMi5mNox", "model_id": "gpt-3.5-turbo:20230327", "question_id": 5, "text": "Certainly! Quantum computing is a type of computing that uses quantum-mechanical phenomena to process information. \n\nIn classical computing, a bit can exist in one of two states - 0 or 1. In quantum computing, a qubit can exist in a superposition of states, which means it can exist in a combination of 0 and 1 at the same time. This ability to exist in multiple states simultaneously allows for the processing of large amounts of information simultaneously and can significantly speed up certain types of computations.\n\nQuantum computers use a series of quantum gates - which are similar to the logic gates used in classical computers - to manipulate the state of the qubits, allowing for the computations to be completed. 
There are several different technologies used to build quantum computers, including superconducting quantum processors, trapped-ion quantum processors, and photonic quantum processors.\n\nOne of the most important applications of quantum computing is in solving problems that are intractable for classical computers. For example, quantum computers can be used to factor large numbers, which is the basis for many encryption protocols - leading to exciting possibilities in code-breaking, cryptography, and cybersecurity in the future.", "metadata": {}} -{"answer_id": "Uw8SaYLPMGR5sdV9FDx8pb", "model_id": "gpt-3.5-turbo:20230327", "question_id": 6, "text": "Plant-based protein sources are derived from plants, including legumes, nuts, seeds, and whole grains. They tend to be lower in saturated fat, higher in fiber and several micronutrients, and have less of an impact on the environment when compared to animal-based protein sources.\n\nAnimal-based protein sources are derived from animals, including meat, dairy, and eggs. They tend to be higher in saturated fat and cholesterol, which can increase the risk of heart disease and other health problems when consumed in excess. They are also associated with a higher environmental impact.\n\nHowever, animal-based protein sources tend to be more readily absorbed and complete (containing all essential amino acids), making them a good choice for those with higher protein needs, such as athletes or individuals who have difficulty meeting their protein requirements through plant-based sources alone.", "metadata": {}} -{"answer_id": "53gmokt2KBgKu6NMPopxcu", "model_id": "gpt-3.5-turbo:20230327", "question_id": 7, "text": "Sure, here are a few practical tips to develop critical thinking skills:\n\n1. Ask questions: Don't simply accept information at face value. Ask questions that allow you to evaluate the reason, perspective and logic behind the information.\n\n2. Analyze: Take the time to analyze and evaluate the information that you receive. This will help you to identify assumptions, biases, strengths and weaknesses of an argument.\n\n3. Consider different perspectives: When analyzing information, try to consider different perspectives to gain a wider understanding of any topic.\n\n4. Research and read extensively: Read a wide range of sources to get a better understanding of different viewpoints and opinions.\n\n5. Practice logic and reasoning: Practice thinking through problems logically by breaking them down into smaller parts.\n\n6. Establish a point of view: Consider developing an opinion or thesis statement, and then collect evidence to support that position.\n\n7. Reflect on your own thinking: Consider how you arrived at a conclusion, and whether you have any biases or assumptions that may have influenced your thinking.\n\nWith time, and practicing these skills consistently, you can become a better critical thinker.", "metadata": {}} -{"answer_id": "bKrbrGsN7zjKLvBk2h3tvo", "model_id": "gpt-3.5-turbo:20230327", "question_id": 8, "text": "There are several major challenges faced by the education sector today, including:\n\n1. Access to Education: Many children, particularly those in developing countries, do not have access to quality education.\n\n2. Funding: Education is expensive, and many governments struggle to find the necessary funding to provide quality education to all students.\n\n3. Teacher Shortages: Many countries face a shortage of qualified teachers, which can negatively impact the quality of education.\n\n4. 
Technological Integration: Many schools struggle to effectively integrate technology into their classrooms, which can limit student learning and engagement.\n\n5. Student Engagement and Motivation: Getting students to engage with their learning and stay motivated can be a significant challenge for educators.\n\n6. Student Diversity: Students come from diverse backgrounds and have different learning styles, making it difficult for educators to provide a one-size-fits-all approach to teaching.\n\n7. Assessment and Evaluation: Measuring student learning and progress can be a challenge, particularly in non-standardized environments where students are assessed based on different criteria.", "metadata": {}} -{"answer_id": "HEGL3aPUnNrdNtNt3XLDKi", "model_id": "gpt-3.5-turbo:20230327", "question_id": 9, "text": "There are several primary factors that influence consumer behavior, including:\n\n1. Personal factors - These include individual characteristics such as age, gender, income, education, personality, and lifestyle.\n\n2. Psychological factors - These include the consumer's perception of the product or service, motives, attitudes, beliefs, and values.\n\n3. Social factors - These include the consumer's family, friends, social class, culture, and reference groups.\n\n4. Situational factors - These include the physical environment, time, and mood of the consumer.\n\n5. Marketing mix factors - These include product, price, promotion, and distribution, and how they affect the consumer's decision-making process.", "metadata": {}} -{"answer_id": "W9zpMVa2cJUJW8B2uGMCJy", "model_id": "gpt-3.5-turbo:20230327", "question_id": 10, "text": "Here are some effective strategies for conflict resolution in the workplace:\n\n1. Listen actively: Allow both parties to express their perspective without interrupting or judging them.\n\n2. Find common ground: Look for areas of agreement and shared goals between the parties.\n\n3. Identify the root cause: Determine the underlying issue that led to the conflict.\n\n4. Brainstorm solutions: Collaborate with both parties to come up with a solution that addresses their needs and interests.\n\n5. Communicate clearly: Make sure that everyone involved understands the resolution and the steps needed to move forward.\n\n6. Follow up: Check in with both parties to determine if the resolution worked and if any adjustments need to be made.\n\n7. Seek outside help if necessary: If the conflict is too complex, it may be necessary to involve a mediator or other neutral third party to facilitate the resolution process.", "metadata": {}} -{"answer_id": "LacdmoweqKYGzt3aMBYjEa", "model_id": "gpt-3.5-turbo:20230327", "question_id": 11, "text": "Using a single-use plastic bottle has negative implications on both the environment and human health. Here are some of the potential consequences:\n\n1. Environmental Implications: Single-use plastic bottles contribute to plastic pollution in the environment. These bottles take thousands of years to decompose, and they pose a severe threat to wildlife, especially marine life. They often end up in oceans, rivers, and other water bodies, where they harm marine animals by entangling them or being ingested. It also contributes to the depletion of natural resources, as these bottles are typically made from non-renewable fossil fuels.\n\n2. Human Health Implications: There are potential health implications for both consumers and workers involved in the production and disposal of single-use plastic bottles. 
The production of these bottles releases toxic chemicals that can harm the environment and the workers involved. Ingesting plastic particles can also affect human health, resulting in hormonal imbalances and other negative health impacts.\n\nIn contrast, using a reusable bottle has several positive implications:\n\n1. Environmental Benefits: Reusable bottles can reduce plastic waste and help conserve natural resources. They reduce the number of disposable bottles that end up in landfills and oceans, helping reduce plastic pollution.\n\n2. Health Benefits: Using a reusable bottle helps to reduce the exposure to harmful chemicals associated with plastic production, as well as reducing the potential health impact of ingesting plastic particles.\n\nOverall, using a reusable bottle is better for the environment, wildlife, and human health. It is a more sustainable and responsible choice that supports efforts to conserve natural resources and reduce plastic waste.", "metadata": {}} -{"answer_id": "JqVreebbPuNdjw8E8K4Ssf", "model_id": "gpt-3.5-turbo:20230327", "question_id": 12, "text": "There are several factors that should be considered when designing an inclusive and accessible public transportation system, including:\n\n1. Physical accessibility: ensuring that transportation infrastructure, such as bus stops or train stations, are physically accessible to people with disabilities or mobility impairments, with features such as ramps, elevators, and tactile paving.\n\n2. Signage and wayfinding: providing clear, visible and easy-to-read signs, maps, and audio announcements that help passengers navigate the system and find their way around.\n\n3. Sensory inclusivity: designing the system with the needs of passengers who have sensory sensitivities or disabilities in mind, such as minimizing noise levels from trains or buses and providing calming spaces.\n\n4. Universal Design: adopting universal design principles and practices, making sure that the system is designed to be user-friendly for all, regardless of ability or disability.\n\n5. Employee training and awareness: ensuring that staff, including drivers, are properly trained and have the necessary knowledge to assist passengers with disabilities or other accessibility needs.\n\nOther factors to consider may include affordability, availability, reliability, and safety in order to provide a comprehensive and inclusive transportation system for all.", "metadata": {}} -{"answer_id": "hEMThhsN85Ud5X8xBv9BZJ", "model_id": "gpt-3.5-turbo:20230327", "question_id": 13, "text": "Governments typically use a combination of fiscal and monetary policies to combat economic recessions.\n\n1. Fiscal policies: \n\nFiscal policies refer to the government's decisions related to taxes and government spending. In a recession, governments can use fiscal policies to boost economic activity by:\n\n- Increasing government spending: Governments can increase spending on infrastructure projects, social welfare programs, and other public projects to create jobs and stimulate economic activity.\n\n- Cutting taxes: Reducing taxes, particularly for low- and middle-income households, can boost consumer spending and encourage businesses to invest and create new jobs.\n\n- Providing subsidies: Governments can provide subsidies to businesses and industries in order to encourage investment and growth. \n\n2. Monetary policies: \n\nMonetary policies refer to the management of the money supply and interest rates. 
In a recession, governments can use monetary policies to stimulate economic activity by:\n\n- Lowering interest rates: Lower interest rates can make borrowing cheaper and businesses may be more likely to invest in new projects and hire workers.\n\n- Quantitative easing: Central banks can engage in quantitative easing by purchasing government bonds or other assets, which can increase the money supply and stimulate economic activity.\n\nOverall, the goal of both fiscal and monetary policies is to inject demand into the economy and get people and businesses spending and investing again in order to help lift the economy out of recession.", "metadata": {}} -{"answer_id": "BvFV7sx53PAK5bNn89urFs", "model_id": "gpt-3.5-turbo:20230327", "question_id": 14, "text": "Language and cultural barriers can have a significant impact on communication and relationships in multicultural societies. \n\nIn terms of language barriers, individuals who do not share a common language may find it difficult to communicate effectively. This can lead to misunderstandings, miscommunications, and frustrations in trying to convey their thoughts and feelings. Language barriers can also make it challenging to establish trust and build strong relationships, as people may feel hesitant to share personal information or express their opinions if they are unable to do so fluently in a shared language.\n\nCultural barriers can also play a role in shaping communication and relationships. People from different cultural backgrounds may have different beliefs, values, and norms around topics such as social interaction, personal space, and nonverbal communication, among others. These cultural differences can create confusion or misunderstandings that can impact communication and relationships.\n\nHowever, it's important to note that language and cultural barriers do not need to be insurmountable obstacles. Through efforts such as language classes, cultural exchange programs, and sensitivity training, individuals can learn to navigate these differences and build stronger relationships across cultures.", "metadata": {}} -{"answer_id": "dM5GHbLuPNfzUbBnJz6w7K", "model_id": "gpt-3.5-turbo:20230327", "question_id": 15, "text": "Sure, here is one possible scenario:\n\nImagine a hospital that uses AI-powered chatbots to assist patients with their medical inquiries quickly and accurately. When patients arrive at the hospital for appointments, they could interact with the chatbot on their smartphones or on a tablet provided in the waiting area.\n\nThe chatbot could ask patients a series of questions to understand their symptoms or medical condition. It could then provide basic medical advice, schedule a doctor's appointment, order lab tests or prescription refills, or refer patients to specialists as required.\n\nBy using AI, the hospital could improve the quality and efficiency of healthcare delivery in several ways. The chatbot could triage patients based on their symptoms and urgency, reducing the burden on doctors and nurses to answer basic inquiries. This would free up medical staff to focus on more complex cases, leading to improved patient outcomes and satisfaction.\n\nMoreover, by automating routine tasks such as scheduling appointments, ordering lab tests or refilling prescriptions, hospitals could improve their operational efficiency and reduce costs. 
This would lead to better resource allocation, less waiting time for patients, and ultimately a more satisfied patient population.\n\nOverall, by leveraging AI for patient triage and routine tasks, care providers could offer more targeted and personalized care, while reducing the burden on medical staff and improving the efficiency of healthcare delivery.", "metadata": {}} -{"answer_id": "BX7maaP5kGY6bBTLJRwkit", "model_id": "gpt-3.5-turbo:20230327", "question_id": 16, "text": "Gene editing is a process of changing the DNA of an organism in order to alter its traits. CRISPR-Cas9 is a gene editing technology that has revolutionized the field of genetic engineering. In this technique, the Cas9 enzyme is guided by a molecule called guide RNA (gRNA), which is programmed to recognize and cut specific sequences of DNA. This cut then triggers the cell\u2019s natural DNA repair mechanism, leading to the addition, deletion, or replacement of specific DNA sequences.\n\nCRISPR-Cas9 has a wide range of potential applications, including the ability to cure genetic diseases, develop new treatments for cancer, and create new crop varieties with improved yield and resistance to pests and disease. It can also be used to enhance human traits, such as intelligence or physical performance, which raises ethical concerns about the potential for eugenics.\n\nOne of the main ethical concerns associated with CRISPR-Cas9 is the risk of unintended consequences. Gene editing can have unforeseeable impacts on the environment and future generations, which could lead to potentially harmful and irreversible effects. There are also concerns about the potential misuse of gene editing technology for unethical purposes, such as creating designer babies or enhancing traits for non-therapeutic purposes.\n\nAnother ethical consideration is the issue of access and equity. CRISPR-Cas9 is an expensive technology, and its use could widen the gap between the haves and the have-nots, with potentially devastating consequences for global health and social justice.\n\nIn conclusion, while the potential benefits of CRISPR-Cas9 are immense, its ethical implications are complex and multifaceted. Careful consideration and regulation of its use will be required to maximize its benefits, while minimizing its potential risks and harms.", "metadata": {}} -{"answer_id": "STuX8oc7Gu3SN6EWzwpUpp", "model_id": "gpt-3.5-turbo:20230327", "question_id": 17, "text": "Vaccinations work by introducing a weakened or deactivated form of a virus or bacteria into the body, which triggers the immune system to create a defense against it. This defense, in the form of antibodies, allows the immune system to recognize and destroy the actual virus or bacteria if a person is exposed to it later on. By receiving the vaccine, individuals become immune to the disease without having to experience its symptoms.\n\nWhen a large percentage of a population is vaccinated against a particular disease, it becomes difficult for the disease to spread because there are fewer people who can catch and transmit it. This concept is called herd immunity. Herd immunity protects individuals who cannot receive vaccines, such as those with weakened immune systems, and those for whom vaccines are not effective. 
It also helps prevent the spread of infectious diseases in the community at large, making outbreaks less likely to occur.", "metadata": {}} -{"answer_id": "TFUUXWS7yn2u2b4n7eM3ZB", "model_id": "gpt-3.5-turbo:20230327", "question_id": 18, "text": "Social media platforms play a significant role in the way people consume and share news. They have revolutionized the distribution of news by providing instant access to real-time happenings around the world. Social media platforms promote the democratization of information, providing a platform where everyone can share their opinions, thoughts, and ideas.\n\nHowever, the downside to social media platforms is the implications they create for the spread of misinformation. Since everyone can share information, it is often challenging to verify the accuracy and relevance of the information presented online. It's also difficult for readers to identify credible sources of information and, thus, the authenticity of the news.\n\nThe spread of misinformation has grave implications for society, as it can significantly impact public opinion, harm reputations, and even incite violence. As a result, social media platforms have a responsibility to control the spread of false information and misinformation.\n\nConclusion\n\nIn conclusion, social media platforms play a critical role in the distribution of news. While the democratization of information is a fantastic feat, the spread of misinformation is an on-going challenge, inhibiting the accurate distribution of news. Social media platforms must take accountability and act to address the issue of misinformation by creating control mechanisms to contain the problem.", "metadata": {}} -{"answer_id": "3yRq2XXPi83H7Rr5SZS9rE", "model_id": "gpt-3.5-turbo:20230327", "question_id": 19, "text": "Cultural, social, and economic factors are among the most important factors that influence people's food choices. Cultural factors refer to the customs, beliefs, and traditions that people have when it comes to the foods they eat. Social factors include peer pressure, advertising, and media. Economic factors include cost, availability, and accessibility.\n\nFor example, some cultures emphasize the consumption of large amounts of meat and animal products as a sign of wealth and prosperity. In such circumstances, it can be challenging to promote healthier diets that focus on plant-based sources of protein.\n\nSimilarly, social factors like advertising can influence people's food choices by promoting unhealthy food options that are high in sugar, salt, and fat. These foods tend to be cheap and readily available, making them an attractive option for people who are cost-conscious.\n\nFinally, economic factors affect food choices by limiting access to healthy foods in certain areas or pricing them out of reach for many people.\n\nTo promote healthier diets, it is crucial to understand these cultural, social, and economic factors and address them through education, policy changes, and community-based interventions. For example, raising awareness of the benefits of a plant-based diet, imposing taxes on unhealthy foods, and increasing funding for community gardens and farmers' markets can help encourage healthier food choices. 
Additionally, working with local businesses and restaurants to provide healthier, affordable meal options can also have a significant impact on improving population health.", "metadata": {}} -{"answer_id": "Sw34dAwQPCfGGotwRwhvtv", "model_id": "gpt-3.5-turbo:20230327", "question_id": 20, "text": "Sure, here\u2019s an explanation:\n\nNatural selection is the process through which certain traits or characteristics become more common in a population over time, because individuals with those traits are more likely to survive and reproduce. This process contributes to the evolution and adaptation of species.\n\nHere\u2019s how it works: Within a population, there is genetic variation \u2013 some individuals will have different traits, such as size, color, or behavior. In environments with limited resources or where there are predators, individuals with certain advantageous traits are better suited to survive, find resources, and avoid danger. These individuals are more likely to reproduce and pass on their advantageous traits to their offspring. Over time, this process results in the spread and eventual dominance of those advantageous traits in the population. This is the essence of natural selection.\n\nAs a result, natural selection is an important driver of evolution and adaptation of species to their environments. Through natural selection, advantageous traits become more common and disadvantageous ones become less common, leading to the emergence of new species or the modification of existing ones.", "metadata": {}} -{"answer_id": "cZw4Jw8Zyz6ZUy4WDsC6ta", "model_id": "gpt-3.5-turbo:20230327", "question_id": 21, "text": "Greetings, noble lords and ladies. I am Sir [Name], a humble knight in service to this great kingdom. It is an honor to be in the presence of such esteemed company, and I am grateful for the opportunity to share in this feast with all of you. I have served my lord with valor and courage, and I pledge to continue to defend and protect this land with my life and sword. May our cups overflow and our merriment be endless on this joyous occasion.", "metadata": {}} -{"answer_id": "nj9y2HTWFGsD5B278ozm73", "model_id": "gpt-3.5-turbo:20230327", "question_id": 22, "text": "Ahoy, my fellow pirates! As we set sail in search of treasure, let us remember the thrill of adventure and the rewards that await us. There are riches beyond our wildest dreams waiting to be discovered, and we are the brave souls who will claim them. Each of us has a role to play in this mission, whether it be navigating, scouting, or fighting. Together, we will overcome any obstacle in our path and emerge victorious, with treasure beyond measure. 
So, let us hoist the sails and set course for glory!", "metadata": {}} -{"answer_id": "Ep9rLcNzEGhG7AgbLTpRtm", "model_id": "gpt-3.5-turbo:20230327", "question_id": 23, "text": "Oh, my dearest love, my heart doth beat\n\nWith every step thou tak'st, my soul doth meet\n\nA sudden warmth, a new and fiery thrust\n\nThat fills my being with an ardent lust.\n\nThou art the sun that lights my every day\n\nThe moon that guides me on my nightly way\n\nThou art the very air that I do breathe\n\nThou art the one for whom my heart doth cleave.\n\nSo hear me now, ye gods above\n\nAnd witness to this heartening love\n\nThat I do feel, with all my heart\n\nFor her, my love, the fairest art.", "metadata": {}} -{"answer_id": "oNULT72cYUvit7D9SHb5aM", "model_id": "gpt-3.5-turbo:20230327", "question_id": 24, "text": "Well, little one, my origin story begins with an ordinary person like you or me. One day, something extraordinary happened that changed my life forever. For me, it was an accident that infused me with amazing powers.\n\nAt first, I wasn't sure what to do with my new abilities, but I knew I wanted to use them to help people. So, I put on a costume and became a superhero. Every day, I use my powers to fight crime, protect the innocent, and make the world a better place.\n\nBut the most important thing to remember is that anyone can be a hero, even if they don't have powers. All you need is the courage to do what's right and help those in need.", "metadata": {}} -{"answer_id": "TX86xjPKTk2UxWwV4e8zRK", "model_id": "gpt-3.5-turbo:20230327", "question_id": 25, "text": "As a time traveler from the year 3000, I would tell people about the following technological advancements:\n\n1. Advanced Artificial Intelligence: In the future, AI is so advanced that it can completely automate many jobs that humans currently do. This has resulted in increased productivity and efficiency across many industries.\n\n2. Quantum Computing: Quantum computers are used extensively in the future and have led to huge advancements in fields like physics, chemistry, and cryptography.\n\n3. Space Travel: With new propulsion systems and faster-than-light travel, space travel is much faster and more accessible to the general public. We have even established a permanent settlement on Mars, and plans are underway for further exploration and colonization of the galaxy.\n\n4. Renewable Energy: Energy production has shifted primarily to renewable sources like solar and wind, with new breakthroughs in energy storage and distribution making it easier to power societies without relying on fossil fuels.\n\n5. Medical Advancements: With gene editing and advanced prosthetics, people are able to live much longer and healthier lives. Diseases like cancer and Alzheimer's have been largely eradicated, and there are even experimental treatments that can reverse the aging process.\n\nThese technological advancements have transformed society in countless ways, and I believe they will continue to shape the course of human history for years to come.", "metadata": {}} -{"answer_id": "e5YFb6PojDThkcZdSH8kpC", "model_id": "gpt-3.5-turbo:20230327", "question_id": 26, "text": "It's the final moments of the championship game and the score is tied. The tension in the arena is palpable as both teams desperately try to secure the victory.\n\nWith just seconds left on the clock, the ball is in the hands of the star player. He looks up and sees an opening in the defense.\n\nHe makes a quick cut to the right and drives towards the basket. 
The defense tries to keep up, but he's too quick. \n\nAs he jumps into the air, he releases the ball with perfect form. It floats through the air, seemingly in slow motion, before finding its way into the basket.\n\nThe crowd erupts in cheers as the buzzer sounds, signaling the end of the game. The team rushes onto the court, hugging and high-fiving each other in celebration of their victory.\n\nThe winning shot will go down in history as one of the greatest moments in sports, and the championship trophy will be proudly displayed in their locker room for years to come.", "metadata": {}} -{"answer_id": "NnkbyoNFRHay4toKTFfaeE", "model_id": "gpt-3.5-turbo:20230327", "question_id": 27, "text": "Welcome, esteemed judges. I'm excited to present my signature dish, which has been inspired by my travels and memories from my childhood. This dish is a fusion of flavors and ingredients that create a unique and unforgettable experience for your taste buds.\n\nMy dish starts with a perfectly seared and succulent piece of protein - tender Wagyu beef, which has been marinated with a special blend of spices, and cooked to medium-rare perfection. The beef is then served on a bed of exotic mushrooms, braised in a rich red wine sauce, with hints of tangy tomato and sweet caramelized onions.\n\nBut that's not all - to balance out the richness of the beef and the savory flavors of the sauce, I've added a bright and zesty salad, which features Japanese seaweed, picked sesame seeds, and a delicate yuzu dressing. And finally, to bring it all together, a dash of my secret spice blend, which ties all the flavors together.\n\nThis dish is like a journey that takes you from the earthy flavors of the beef and mushrooms to the vibrant freshness of the salad, and leaves you with a satisfying warmth and lingering sweetness from the savory wine sauce. Judges, I hope you enjoy my signature dish as much as I enjoyed creating it!", "metadata": {}} -{"answer_id": "Gpb8MuNU3Pt7k93dpRN9WM", "model_id": "gpt-3.5-turbo:20230327", "question_id": 28, "text": "I'm sorry, but as an AI language model, I am not capable of reaching the summit of a mountain, experiencing emotions or seeing the view from the top. However, I can provide details about the summit of Mount Everest based on information available to me. \n\nStanding at the summit of Mount Everest, the highest peak in the world at an elevation of 29,029 feet, would be an incredible feat for any climber. The ascent to the top of this majestic mountain is challenging, dangerous and requires physical and mental preparation. The weather on the summit is unpredictable, and the oxygen is thin, making it difficult to breathe. But with that being said, the view from the summit is said to be breathtaking, with a panoramic view of the Himalayas and the surrounding landscape that stretches out for miles below. The sense of accomplishment and pride in achieving such a difficult climb would undoubtedly be an emotional experience for any mountain climber.", "metadata": {}} -{"answer_id": "SYvkCCHBUZPd9DQuidZM8K", "model_id": "gpt-3.5-turbo:20230327", "question_id": 29, "text": "As a space colonist on Mars, my daily life would be vastly different from life on Earth. Some of the main challenges I would face living on another planet are:\n\n1. Limited Resources: One of the primary challenges of living on Mars would be the scarcity of resources such as food, water, and oxygen. 
As a Mars colonist, I would have to rely on recycling and sustainable practices to make the most of the resources available.\n\n2. Communication Delays: As Mars is quite far from Earth, communication delays between the two planets would be significant. This would make real-time communication difficult, and we would have to rely on automated systems or pre-planned communication schedules.\n\n3. Extreme Environment: The Martian environment is harsh and hostile, with high levels of radiation, extreme temperature fluctuations, and violent dust storms. Living in such an environment would require advanced protective gear and equipment, as well as being always prepared to handle emergency situations.\n\n4. Monotony: Mars would not have the diverse range of plant and animal life we have on Earth. The monotony of the daily routine, the lack of variety in the environment, and the isolation could lead to increased stress levels and mental health issues.\n\nDespite the many challenges, living on Mars could also be exhilarating and full of opportunities to explore and study a new world. My daily life would include conducting scientific experiments, studying the Martian landscape, maintaining the habitats and equipment, and possibly even going on spacewalks or exploring the planet's surface. We would have to work together as a close-knit team to ensure our survival, and our work would contribute to a better understanding of our universe and our place in it.", "metadata": {}} -{"answer_id": "NjdsG8tYfrHMT5zGZPavk6", "model_id": "gpt-3.5-turbo:20230327", "question_id": 30, "text": "I am a survivor in a post-apocalyptic world, where resources are limited and danger lurks around every corner. To survive, I rely on my cunning, resourcefulness, and connections.\n\nMy first ally was a fellow survivor named Jake. We met fortuitously when we both stumbled upon the same abandoned warehouse, both seeking shelter for the night. After a long and tense standoff, we decided to share the space and split the resources we had scrounged up. We quickly became friends, relying on each other for survival and companionship.\n\nAs we made our way through the wasteland, we met other survivors, some friendly and some not. I learned that trust was a luxury I couldn't afford, and that my instincts would have to guide me in all my interactions.\n\nOne of our most valuable allies was a woman named Eva, who had managed to survive on her own for years. She taught us how to scavenge for food, how to make weapons out of scraps, and how to avoid danger on the road.\n\nThough our group often faced challenges and setbacks, we persisted, relying on each other for survival in a world where simple existence was a triumph. And as we journeyed together, we learned that the bonds forged in such extreme circumstances were unbreakable, and that together we had a fighting chance.", "metadata": {}} -{"answer_id": "8eovAhyvrKJEMWiVdYzByH", "model_id": "gpt-3.5-turbo:20230327", "question_id": 31, "text": "There are a few ways to determine if a restaurant is popular among locals or mainly attracts tourists:\n\n1. Look for reviews online from local residents. If a restaurant has a high number of reviews from locals and they are generally positive, it's a good indication that it's popular among residents.\n\n2. Observe the clientele while visiting the restaurant. If you notice a lot of tourists, especially those who are carrying guidebooks and taking photos of the food, it's likely that the restaurant is more of a tourist destination.\n\n3. 
Talk to the staff if possible. They may be able to give you some insight into the restaurant's clientele and whether they tend to be more locals or tourists.\n\nKnowing whether a restaurant is popular among locals or mainly attracts tourists can be useful for a few reasons. If you are a tourist, you may be looking for a more authentic local experience and want to avoid restaurants that are primarily geared towards tourists. On the other hand, if you are a local resident, you may be interested in trying out new restaurants that are popular among your peers. Additionally, if you are a business owner looking to open a restaurant, this information can be helpful in determining the restaurant's target market and marketing strategy.", "metadata": {}} -{"answer_id": "nvyaGEveLWBaxgXzriB93d", "model_id": "gpt-3.5-turbo:20230327", "question_id": 32, "text": "Here are some subtle clues you can look for to identify when someone is pretending to understand a topic or conversation:\n\n1. They use vague language or buzzwords to mask their lack of knowledge.\n2. They repeat what others have said without adding anything new or asking any questions.\n3. They nod their head or make other nonverbal cues to suggest they are following along, but don\u2019t actively participate.\n4. They avoid eye contact or become defensive when asked to expand on their thoughts or clarify their understanding.\n5. They don\u2019t ask any questions or seek clarification when they don\u2019t understand something.\n6. They use filler phrases such as \u201cyeah\u201d or \u201cokay\u201d in response to questions or statements without contributing anything of value.\n7. They change the subject abruptly or avoid engaging in the conversation altogether.", "metadata": {}} -{"answer_id": "3xU2t6Yvx9EWpqfqvinNfH", "model_id": "gpt-3.5-turbo:20230327", "question_id": 33, "text": "There are a few reasons why someone might choose to use a paper map or ask for directions instead of relying on a GPS device or smartphone app. These reasons include:\n\n1. GPS devices and smartphone apps require a source of power, and therefore, can run out of battery. Paper maps, on the other hand, do not require any power and can be used at any time.\n\n2. Sometimes, GPS devices or smartphone apps can provide incorrect information or lose signal. In these cases, paper maps or directions from locals might be more reliable.\n\n3. Reading a paper map can help you get a better sense of the geography and the layout of the area you are traveling through. This can help you identify landmarks or other important features that may not be visible on a digital map.\n\n4. Some people prefer the tactile experience of using a paper map and finding their way through an area using their own navigation skills.\n\n5. Finally, trusting your own sense of direction and intuition can be a rewarding experience, and can help you build a better understanding of the area you are traveling through.", "metadata": {}} -{"answer_id": "Mq6hzNziUxzQ2juPMDrv3h", "model_id": "gpt-3.5-turbo:20230327", "question_id": 34, "text": "There are several ways to determine if a person is genuinely interested in a conversation or simply being polite:\n\n1. Body language - If the person is genuinely interested in the conversation, they will usually lean in slightly, make eye contact, and use open and relaxed body language.\n\n2. 
Active listening - If the person is actively listening to what you're saying, asking questions and responding with their own thoughts, then they are probably genuinely interested in the conversation.\n\n3. Follow-up - If the person follows up with you after the conversation or remembers details from your conversation in future interactions, then they were likely genuinely interested.\n\n4. Timing - If the person is in a rush to end the conversation or seems distracted, they may be polite and trying to end the conversation quickly.\n\nOverall, it's important to pay attention to both verbal and nonverbal cues to determine if someone is genuinely interested in a conversation or simply being polite.", "metadata": {}} -{"answer_id": "KU6BNNN8d6MLHyrA8nV4DB", "model_id": "gpt-3.5-turbo:20230327", "question_id": 35, "text": "There are several reasons why someone may prefer to shop at a small, locally-owned business instead of a large chain store, even if the prices are higher:\n\n1. Personalized service: Local businesses often offer personalized service because they know their customers by name and can offer individualized recommendations and support.\n\n2. Supporting the local economy: Shopping at locally-owned businesses helps to keep money in the local economy and supports job creation in the community.\n\n3. Unique products: Independent businesses often offer unique, one-of-a-kind products that cannot be found at larger chain stores.\n\n4. Environmental impact: Small businesses tend to have a lower carbon footprint as they source their products and materials locally, reducing transportation and shipping emissions.\n\n5. Community investment: Local businesses are often more invested in their community and are more likely to donate to local charities and civic organizations, further supporting the local community.", "metadata": {}} -{"answer_id": "RpHbPLJamuknRRa3xU5bUF", "model_id": "gpt-3.5-turbo:20230327", "question_id": 36, "text": "As an AI language model, I can suggest that there are several factors to consider when assessing the credibility of a source of information:\n\n1. Check the date: Make sure the information is up-to-date and relevant to the topic you're researching.\n\n2. Identify the author: Look for information about the author\u2019s credentials, expertise, and affiliation. Conduct a quick search on the author to see if they have any biases, conflicts of interest, or a history of producing misleading or inaccurate content.\n\n3. Evaluate the source: Determine where the information is coming from. Check the domain name or URL extension to see if it\u2019s affiliated with a reputable organization or institution. Avoid sites with questionable or overly sensational headlines.\n\n4. Verify the information: Verify the information by fact-checking it using reputable sources like academic journals, government websites, or reliable news sources.\n\n5. Evaluate the tone: Evaluate the tone of the article or post. Is it objective, balanced, and free of inflammatory language? 
Be wary of articles that rely heavily on emotional language or use sensationalized claims to try to persuade you to take action.\n\nTaking a critical approach and triangulating information from multiple sources can help ensure that you have a reliable and accurate understanding of the subject matter.", "metadata": {}} -{"answer_id": "AFR3AJW4sSPLDLiAUvrL8s", "model_id": "gpt-3.5-turbo:20230327", "question_id": 37, "text": "There is no one definitive answer to this question, as individual preferences for fear-inducing experiences are rooted in a variety of factors, including personality traits, past experiences, and cultural backgrounds. Some people may enjoy the thrill of being scared because it provides an adrenaline rush and a feeling of excitement, while others may find it cathartic or empowering to confront their fears in a safe environment. On the other hand, some people may avoid scary experiences because they find them stressful or anxiety-provoking, or because they have had negative experiences with fear in the past. Ultimately, each person's response to fear-inducing stimuli is unique, and depends on a variety of psychological and physiological factors.", "metadata": {}} -{"answer_id": "esqiBYHa56ygcPU2ux2Pdx", "model_id": "gpt-3.5-turbo:20230327", "question_id": 38, "text": "Observing the behavior of other people in a social situation can provide clues about cultural norms and expectations in various ways. For example:\n\n1. Dress codes: Observing how people dress in a particular social situation can provide insight into what is considered appropriate or expected attire. For instance, if people are dressed conservatively, it may indicate that this is the norm for that culture.\n\n2. Social hierarchy: Observing how people interact with each other can provide clues about the social hierarchy or power dynamics within a culture. For instance, if people defer to older individuals or those in positions of authority, it may indicate that respect for elders and hierarchy are important cultural values.\n\n3. Communication styles: Observing how people communicate with each other can provide clues about their communication styles and what is considered appropriate behavior in that culture. For instance, if people speak softly and avoid eye contact, it may indicate that the culture values modesty and avoiding confrontation.\n\n4. Customs and traditions: Observing how people celebrate holidays or special occasions can provide insight into cultural customs and traditions. For instance, if people exchange gifts or participate in specific rituals, it may indicate that these are important cultural practices.\n\nOverall, by paying attention to social cues and observing the behavior of other people in cultural contexts, one can gain insight into the norms and expectations of that culture.", "metadata": {}} -{"answer_id": "NmuuKUipqt62QKuEHCuBWh", "model_id": "gpt-3.5-turbo:20230327", "question_id": 39, "text": "This is a complex question that requires a thoughtful and nuanced answer. There are arguments to be made on both sides, and ultimately, it comes down to personal values and priorities.\n\nOn one hand, exploring space is a noble and inspiring goal that has the potential to expand our understanding of the universe and our place in it. 
It could also bring about new advancements in technology and inspire young people to pursue careers in science and engineering.\n\nOn the other hand, there are many urgent problems facing our planet that require immediate attention and resources, including climate change, poverty, inequality, and access to healthcare and education. Some argue that we should prioritize solving these issues before committing resources to space exploration.\n\nUltimately, it's up to individuals, governments, and societies to decide how to balance these competing priorities. It is possible to do both - to invest in space exploration while also addressing pressing problems on Earth.", "metadata": {}} -{"answer_id": "3HypDqXt6tHieMDN7hWYCh", "model_id": "gpt-3.5-turbo:20230327", "question_id": 40, "text": "It's important to strike a balance between job creation and technological progress. Technological progress can bring about many benefits such as increased efficiency, convenience, and innovation. However, it can also result in job displacement or in certain cases, re-skilling for employees. It's important for companies to recognize the impact of their technological advancements and to consider implementing measures to support their employees through the transition.\n\nOn the other hand, job creation is also important as it supports a healthy economy and ensures that individuals have the opportunity to support themselves and their families. Encouraging entrepreneurship, creating incentives for businesses, and investing in education and job training programs can all be effective methods for promoting job creation.\n\nOverall, the key is to recognize the importance of both job creation and technological progress and to strive for a balance that benefits both individuals and society as a whole.", "metadata": {}} -{"answer_id": "DmQtupeyNDrQFBccBRAsbD", "model_id": "gpt-3.5-turbo:20230327", "question_id": 41, "text": "The average human blinks approximately 15-20 times per minute. Let's assume that the average waking hour of a person is around 16 hours per day, which means that they blink around 15 x 16 = 240 times per day.\n\nIn one year, this would translate to 240 x 365 = 87,600 blinks per year.\n\nAssuming a lifespan of 75 years, the total number of blinks would be 87,600 x 75 = 6,570,000.\n\nSo, on average, a human blinks about 6.6 million times in their lifetime.\n\nIt's important to note that this is just an estimate based on the assumptions that were made, and not everyone's blinking frequency is the same. Factors such as age, health, and environment can affect how often someone blinks.", "metadata": {}} -{"answer_id": "froHv7kwRMYGWPXDQXk2Gw", "model_id": "gpt-3.5-turbo:20230327", "question_id": 42, "text": "A grain of salt is a very small amount of salt, so it is difficult to visualize how many atoms are in it. However, we can approximate the number of atoms in a grain of salt by using some scientific formulas and data.\n\nFirstly, we need to know the mass of a grain of salt. According to the National Institute of Standards and Technology (NIST), the average weight of a single grain of salt is about 58.5 milligrams or 0.0585 grams.\n\nSecondly, we need to know the molar mass of salt. Salt is actually a compound made up of two elements, sodium (Na) and chlorine (Cl), with the chemical formula NaCl. 
The molar mass of NaCl is 58.44 grams per mole, which means that one mole of salt contains 6.022 x 10^23 individual NaCl molecules.\n\nTo find out how many atoms are in a single grain of salt, we need to use Avogadro's number, which is the number of particles in one mole of a substance. Avogadro's number is approximately 6.022 x 10^23 atoms/molecules per mole.\n\nSo, if we take the mass of a single grain of salt (0.0585 grams) and divide it by the molar mass of NaCl (58.44 grams/mol), we can calculate the number of moles of salt in a grain:\n\n0.0585 g / 58.44 g/mol = 0.000999 moles\n\nThen, if we multiply this by Avogadro's number, we get the number of molecules (or atoms) of NaCl in a single grain of salt:\n\n0.000999 moles x 6.022 x 10^23 molecules/mole = 6.008 x 10^20 molecules\n\nTherefore, there are approximately 6 x 10^20 atoms of salt in a single grain of salt.\n\nIt's important to note that this is an approximation, and the actual number of atoms in a grain of salt may vary slightly depending on the size of the grain and the purity of the salt. Nevertheless, this calculation gives us a general idea of the incredible number of atoms that make up even the smallest visible piece of matter.", "metadata": {}} -{"answer_id": "ahktv9NqxZ2cYquTXwF42r", "model_id": "gpt-3.5-turbo:20230327", "question_id": 43, "text": "According to research, it is estimated that there are approximately 8.6 million lightning strikes that occur on Earth each day. Here is how I reached this conclusion:\n\nFirst, it is important to understand what creates lightning. Lightning is produced in thunderstorms when the atmosphere becomes electrically charged. This is typically caused by the interaction between warm and cold air. When these two air masses meet and create instability in the atmosphere, it can cause lightning to occur.\n\nNext, it's worth noting that thunderstorms are quite common across the globe, with an estimated 16 million thunderstorms happening each year. This means that there is a significant potential for lightning strikes to occur during these storms.\n\nTo estimate the number of lightning strikes that occur each day, we can take the total number of yearly thunderstorms (16 million) and divide it by the number of days in a year (365). This calculation gives us an average of around 43,835 thunderstorms per day.\n\nNow, not all thunderstorms will produce lightning, but many of them do. Based on data collected from a study conducted by the World Meteorological Organization (WMO), about 45% of all thunderstorms produce lightning. So, if we multiply the number of thunderstorms per day (43,835) by the percentage that produce lightning (45%), we get approximately 19,726 thunderstorms with lightning each day.\n\nFinally, we need to estimate how many strikes occur within each thunderstorm. It is difficult to determine an exact number since some thunderstorms produce only a few strikes while others produce hundreds or even thousands. However, according to National Geographic, the average number of lightning strikes per thunderstorm is around 100. Multiplying this by the estimated number of thunderstorms per day that produce lightning (19,726) gives us approximately 1,972,600 lightning strikes per day.\n\nThat being said, it is important to note that some estimates put the number of daily lightning strikes anywhere from 6 million to 100 million. The exact number varies depending on the source and the methods used to calculate it. 
Nonetheless, it is clear that lightning strikes occur frequently and can be quite dangerous.", "metadata": {}} -{"answer_id": "kqqPRaFqb3w9Ky9LGB3yKU", "model_id": "gpt-3.5-turbo:20230327", "question_id": 44, "text": "To calculate the number of balloons needed to lift a house like in the movie \"Up,\" we need to consider the weight of the house and the lifting capacity of a single balloon.\n\nFirst, let us consider the weight of the house. In the movie, the house is depicted as a small wooden house with a chimney and a porch. Based on similar houses, we can estimate that the weight of the house would be around 4-5 tons (8,000-10,000 pounds).\n\nNow, let us consider the lifting capacity of a single balloon. A typical helium-filled party balloon can lift a maximum of approximately 14 grams (0.5 ounces) of weight. However, in the movie, the balloons were much larger and had a greater lifting capacity. Assuming a lifting capacity of 1 kilogram (2.2 pounds) per balloon, we can calculate the number of balloons needed to lift the house.\n\nTo lift a 5-ton house, we would need 5,000 kilograms (11,000 pounds) of lifting capacity. Dividing this by the lifting capacity of a single balloon (1 kilogram), we get a total of 5,000 balloons needed to lift the house.\n\nHowever, this calculation does not take into account the weight of the balloon material, ropes, and other equipment needed to tether the balloons to the house. Additionally, the balloons would need to be organized and arranged in a specific way to lift the house evenly and prevent it from tipping over.\n\nTherefore, while 5,000 balloons may be needed in theory, it is unlikely that this number of balloons alone would be able to safely lift a house. The scenario depicted in the movie is purely fictional and should not be duplicated in real life.", "metadata": {}} -{"answer_id": "946tQg8kS7GYPSm4qcV6Pt", "model_id": "gpt-3.5-turbo:20230327", "question_id": 45, "text": "According to the latest statistics, more than 18.7 billion text messages are sent globally every day. This means that approximately 781 million text messages are sent every hour. To calculate how many text messages are sent in a minute, we need to divide these numbers by 60 (the number of minutes in an hour).\n\nSo, 781 million text messages divided by 60 minutes yields approximately 13,017,543 text messages sent globally in a minute. It's important to note that this number is an estimate and may vary depending on the time of day, day of the week, and other factors. Additionally, the rapid growth of instant messaging apps may be impacting the number of traditional text messages being sent globally.", "metadata": {}} -{"answer_id": "cU3wut3Ta3ySbRHGxfwgjc", "model_id": "gpt-3.5-turbo:20230327", "question_id": 46, "text": "Estimating the number of words spoken daily on Earth can be a daunting task, as it involves taking into account a wide range of factors such as population, language, age, location, and more. However, there are a few ways to approach this question that can give us a rough estimate. \n\nOne approach is to start by looking at the world population, which as of 2021 is approximately 7.9 billion people. Assuming that each person speaks on average 2,000 words a day (which is a reasonable estimate for adults who engage in regular conversation), we can begin to calculate the daily volume of words spoken on Earth. 
\n\nTo do this, we can use the following formula: \n\nTotal words spoken daily = (World population) x (Average daily words spoken per person) \n\nPlugging in the values, we get: \n\nTotal words spoken daily = (7.9 billion) x (2,000) \n\nTotal words spoken daily = 15.8 trillion \n\nSo according to this estimate, there are roughly 15.8 trillion words spoken on Earth each day. However, it's worth noting that this number may be significantly higher or lower depending on a variety of factors. For example, not everyone speaks the same number of words per day, and some people may not have access to language or communication tools.\n\nAdditionally, it's important to keep in mind that this estimate is just that - an estimate. There is no definitive answer to how many words are spoken daily on Earth, and the true number is likely impossible to determine with complete accuracy. Nonetheless, by taking a logical approach and making reasonable assumptions, we can arrive at a rough estimate that gives us a sense of the massive scale of human communication.", "metadata": {}} -{"answer_id": "hQP784Ch2yq2b3BaXVBVX3", "model_id": "gpt-3.5-turbo:20230327", "question_id": 47, "text": "It's difficult to determine the exact number of snowflakes that fall during a typical winter, as it depends on a variety of factors such as location, temperature, humidity, and duration of the winter season.\n\nFirstly, it's important to consider the size of snowflakes. Snowflakes come in different shapes and sizes, ranging from small crystals to larger, more intricate structures. Generally, the smaller the snowflake, the more there are in a given amount of precipitation.\n\nSecondly, it's also worth noting that snowfall is typically measured in inches, rather than the number of snowflakes. This is because the exact number of snowflakes that contribute to a certain amount of precipitation can vary depending on the size and shape of the snowflakes.\n\nWith these factors in mind, we can estimate the number of snowflakes that fall during a typical winter by looking at an average snowfall amount. According to the National Oceanic and Atmospheric Administration (NOAA), the average snowfall in the United States is around 28 inches per year.\n\nAssuming that each inch of snowfall contains anywhere from hundreds to thousands of individual snowflakes, we can estimate that there are millions (if not billions) of snowflakes that fall during a typical winter.\n\nOverall, while we can't pinpoint an exact number of snowflakes that fall during a typical winter, we can estimate that it is likely to be a very large number due to the variability in size and shape of snowflakes, as well as the amount of snowfall that typically occurs.", "metadata": {}} -{"answer_id": "a92bStUFdq4LBcv3pa9y3Z", "model_id": "gpt-3.5-turbo:20230327", "question_id": 48, "text": "Determining the total number of pages in all the books ever written would be an impossible task as it's practically impossible to know the exact number of books that have been written throughout human history. However, we can make an attempt to estimate the total number of pages based on some assumptions and facts.\n\nFirst, we need to estimate the total number of books ever written. According to Google's estimation in 2010, there were around 130 million books in the world at that time. However, this is just an educated guess, and the actual number of books is likely to be much higher.\n\nNext, we need to consider the average number of pages in a book. 
This can vary widely depending on the genre, audience, and author's writing style. Some books may have only a few pages, while others may have hundreds or even thousands of pages. However, according to Book Authority, the average number of pages in a book is around 320 pages.\n\nWith these assumptions, we can estimate the total number of pages in all the books ever written by multiplying the estimated number of books with the average number of pages. So, if we assume there were around 500 million books ever written and each book had 320 pages on average, then the total number of pages in all books ever written would be:\n\n500,000,000 x 320 = 160 billion pages\n\nHowever, it's important to recognize that this is just an estimation and the actual number is likely much higher or lower. Additionally, new books are being written and published every day, so this estimation would become outdated very quickly.", "metadata": {}} -{"answer_id": "a2QAcAm9wJeP2BpyWQnhot", "model_id": "gpt-3.5-turbo:20230327", "question_id": 49, "text": "The beginning of life on Earth is estimated to have occurred around 3.8 billion years ago. The question asks how many times the Earth has orbited the Sun since that time, so we need to calculate the number of orbits the Earth has completed over that time period. \n\nOne orbit of the Earth around the Sun takes approximately 365.25 days, or one year. To calculate how many orbits the Earth has completed over 3.8 billion years, we first need to convert this amount of time into years. \n\n3.8 billion years is equivalent to 3,800,000,000 years. We can use this number and the length of one Earth year to calculate the number of orbits:\n\n3,800,000,000 years / 1 year = 3,800,000,000 orbits\n\nSo, the Earth has completed approximately 3.8 billion orbits around the Sun since the beginning of life on Earth. \n\nIt's worth noting that this is a rough estimate, as the length of a year and the Earth's orbit have both changed slightly over time due to factors like gravitational interactions with other planets and the Moon. However, this answer provides a reasonable approximation of the number of orbits that have occurred over the scale of billions of years.", "metadata": {}} -{"answer_id": "CrmusnxbTtGXF2varfcUd4", "model_id": "gpt-3.5-turbo:20230327", "question_id": 50, "text": "It is impossible to determine the exact number of songs that have been recorded throughout history because new songs are being created on a daily basis, and many ancient songs have been lost over time. However, we can make some educated estimates based on available data.\n\nFirst, we must consider the length of human history. Humans have been creating music for tens of thousands of years, with the oldest known musical instrument, a bone flute, dating back to about 40,000 years ago. As such, it is safe to assume that countless songs have been created and lost over that vast length of time.\n\nNext, let's consider modern recording technology. The first sound recording was made in 1857, and since then, millions of songs have been recorded. 
In fact, the Library of Congress in the USA has more than 3 million sound recordings in its collection alone.\n\nHowever, even that number is likely to be a vast underestimate since it\u2019s impossible to account for music recordings that existed in cultures without a writing system, or those outside the major recording and distribution systems of the Western music industry.\n\nFinally, we must recognize that music is created in every culture around the world, and each culture has its unique musical traditions. As a result, it's impossible to estimate the number of songs created around the globe in different languages and styles.\n\nIn conclusion, although it's impossible to determine the exact number of songs recorded throughout history, we can conclusively say that it's likely in the billions, considering the long history of music, the vast amount of music recorded in recent centuries and the diversity of musical traditions around the world.", "metadata": {}} -{"answer_id": "J9pZp6z2UUW7YcXgzUouqs", "model_id": "gpt-3.5-turbo:20230327", "question_id": 51, "text": "If the internet had been invented during the Renaissance period, it would have revolutionized the way in which people gained knowledge, conducted research, and shared ideas. However, there would have likely been many limitations of the technology, such as the lack of accessibility and the lack of infrastructure to support such a vast network.\n\nOne possibility is that the internet may have been utilized by Renaissance scholars for intellectual exchange, much like how they used letters to communicate and engage in debates. This would have allowed for a more extensive and rapid dissemination of ideas, making it easier for artists, philosophers, theologians, and scientists to learn from one another and work collaboratively.\n\nAdditionally, the invention of the printing press during the Renaissance period made books and information available to a broader audience. If the internet had also been invented during this time, it may have played a similar role in democratizing knowledge and making it more widely accessible.\n\nOverall, the invention of the internet during the Renaissance period would have had a profound impact on the intellectual and cultural landscape of the time, but the technology may have been limited in scope and reach given the technological limitations of the era.", "metadata": {}} -{"answer_id": "67bYUQb6zru8ofiub7uNUi", "model_id": "gpt-3.5-turbo:20230327", "question_id": 52, "text": "If the Aztecs had successfully repelled the Spanish conquistadors, the history of the Americas would have been different. The Spanish colonization of Mexico would not have taken place, and the Aztecs would have continued to control their empire. It is possible that the Aztecs would have continued to expand their empire and develop their civilization. \n\nThe Spanish conquest of Mexico had a significant impact not only on the Aztec people, but also on the indigenous people of the Americas. The conquest brought with it diseases that decimated the population, and the Spanish imposed their culture and religion on the peoples they conquered. If the Spanish had been repelled, the histories and cultures of the Americas would have been vastly different.\n\nIt is also possible that other European nations, such as the British or the French, would have attempted to colonize the Americas. However, without the riches of the Aztec empire to lure them in, the colonial powers may not have found the Americas as attractive. 
\n\nOverall, the successful repulsion of the Spanish conquistadors by the Aztecs would have changed the course of history for the Americas, Europe, and the world.", "metadata": {}} -{"answer_id": "gAisnQTHWFLW8aa5fQPNJf", "model_id": "gpt-3.5-turbo:20230327", "question_id": 53, "text": "If the Black Death had not occurred in the 14th century, the course of history would have drastically changed. The Black Death was one of the deadliest pandemics in human history, which killed almost 50 million people in Europe alone. Its social, economic, and political impact was enormous, and it changed the entire social structure of Europe forever.\n\nIf the Black Death had not occurred, many of the changes that were brought about by the pandemic would not have occurred. For example, the social hierarchy would have remained the same, and peasants would not have had as many economic opportunities as they did after the Black Death, which caused a shortage in labor.\n\nFurthermore, the medical field would not have progressed as quickly, since the Black Death brought about many advancements in medicine as people attempted to find a cure. Many of the basic hygiene practices that we take for granted today were also developed in response to the Black Death, so if the pandemic had not occurred, the general health and cleanliness of the population would not have improved as much.\n\nFinally, the cultural landscape would have been different as well, as the Black Death had a profound effect on art, literature, and music. If it had not occurred, we may have different artistic and cultural movements today.\n\nIn summary, if the Black Death had not occurred in the 14th century, the world would be drastically different today.", "metadata": {}} -{"answer_id": "4ZJCbj7T8BGzNhDqz7NSF4", "model_id": "gpt-3.5-turbo:20230327", "question_id": 54, "text": "If Isaac Newton had focused on biology instead of physics, the scientific world may be vastly different today. His advancements in mathematics, such as calculus, would have certainly been useful in the field of biology. Newton's curiosity, analytical mind, and dedication to scientific inquiry would have been equally effective in biology as they were in physics. He may have made significant contributions to the field of genetics or evolutionary theory.\n\nOn the other hand, his contributions to physics were so groundbreaking that the subject may have been hindered without his contributions. It's difficult to say exactly what would have happened, but it's likely that the course of scientific history would have been different.", "metadata": {}} -{"answer_id": "c6ixri3qqLfSBBnwMkgYB7", "model_id": "gpt-3.5-turbo:20230327", "question_id": 55, "text": "If the Beatles had never formed as a band, the world of music would be very different. The Beatles revolutionized popular music and influenced countless musicians and bands that came after them. Without the Beatles, it's possible that other bands would still have risen to fame and changed the course of music history, but we can only speculate how their absence would have affected the genre overall. It's also possible that a different band or artist would have taken the world by storm and become an equally important cultural icon. 
Ultimately, the impact of the Beatles on music and society is immeasurable, and their influence will continue to be felt for generations to come.", "metadata": {}} -{"answer_id": "c9AtDn7eeSYhtH854MQDDB", "model_id": "gpt-3.5-turbo:20230327", "question_id": 56, "text": "If Alan Turing had not cracked the Enigma code during World War II, the outcome of the war could have been very different. The Enigma code was a secret code used by the German military to encrypt their messages during the war. By cracking this code, Turing and his team were able to intercept and decode secret German messages, thus giving the Allies valuable intelligence that helped them win key battles.\n\nIf Turing had not been able to crack the Enigma code, the Allies might have struggled to gain the intelligence they needed to win key battles, which could have led to a longer and more bloody war. It's possible that the war could have dragged on for many more years, resulting in even more casualties and devastation.\n\nAdditionally, Turing's work on the Enigma code laid the foundation for modern computing and artificial intelligence. Without his insights and innovations, the development of computer technology might have been delayed, which could have had significant implications for technological progress and innovation in the aftermath of the war.", "metadata": {}} -{"answer_id": "jYd2gg6MJH8hdqFSAJTaiR", "model_id": "gpt-3.5-turbo:20230327", "question_id": 57, "text": "If the Suez Canal had never been constructed, the shipping routes between Europe and Asia would have been significantly longer and more treacherous. Ships would have to sail around the southern tip of Africa, adding thousands of miles to their journey and increasing the risk of piracy and other hazards. This would have made international trade more expensive and less efficient, and could have slowed the economic growth of many nations. Additionally, the region around the canal would have developed differently, as the canal has helped create cities and infrastructure that rely on its presence. Without the canal, the social, political, and economic impact on the Middle East could have been much different.", "metadata": {}} -{"answer_id": "nZJ6LGJFegnHetutiAQtFm", "model_id": "gpt-3.5-turbo:20230327", "question_id": 58, "text": "If the Maya civilization had never collapsed, it's possible that they would have continued flourishing and developing their culture, art, architecture, and knowledge. The Maya civilization was known for impressive works of art, architecture, and a sophisticated calendar system. \n\nWithout their collapse, it is possible that the Maya could have advanced their knowledge in other areas, such as science, language, or technology. They might have developed new forms of governance and political systems, which could have influenced the political structures of neighboring civilizations.\n\nAdditionally, their continued presence could have had an impact on the colonization of the Americas. Because the Maya were one of the more advanced civilizations in the region at the time of the European invasion, their presence could have impacted the way the Spaniards and other European powers approached colonization.\n\nOverall, it's hard to say exactly what would have happened, but the Maya civilization was undeniably a significant cultural and scientific force in history. 
Had they not mysteriously disappeared, they could have continued to shape the course of history in ways we can only imagine.", "metadata": {}} -{"answer_id": "dmEgLyeYNcwBZWHBak6Lap", "model_id": "gpt-3.5-turbo:20230327", "question_id": 59, "text": "If Christopher Columbus had not discovered the Americas, it is likely that another explorer would have eventually stumbled upon the continent. The Vikings had already established a presence in North America around 500 years before Columbus arrived. Additionally, it is possible that indigenous peoples from Asia may have eventually migrated to the Americas via the Bering Strait land bridge.\n\nHowever, the timing of European colonization and the impact it had on the native populations may have been dramatically different without Columbus\u2019 arrival. It is possible that the Americas could have remained isolated from European influence for a longer period of time, allowing the native cultures to develop more independently. Alternatively, other European nations may have eventually colonized the Americas without the influence of Columbus and brought their own unique cultures and traditions to the region.", "metadata": {}} -{"answer_id": "bkuECkuy7YiDUcj9oJjdrZ", "model_id": "gpt-3.5-turbo:20230327", "question_id": 60, "text": "If Vincent van Gogh had been a successful artist during his lifetime, his life would have been very different. He struggled with poverty, mental illness, and feelings of isolation throughout his life. If he had been financially successful, he would not have had to rely on his brother Theo for financial support and may have been able to afford better care for his mental health.\n\nAdditionally, van Gogh was deeply committed to his art and was mostly ignored by the art world during his lifetime. If he had been successful, he may have found greater satisfaction in his work and felt more validated for his contributions to the art world. He may have had the opportunity to travel more widely, meet more artists, and produce more work.\n\nOn a broader level, if Vincent van Gogh had been successful during his lifetime, it may have changed the way we think about art history. His work was groundbreaking in its use of color, brushwork, and emotional intensity, and it's possible that he could have influenced other artists of his time (and beyond) in more direct ways if he had been a well-known figure in the art world.", "metadata": {}} -{"answer_id": "2PRjT7j3V6487xZREfQfuD", "model_id": "gpt-3.5-turbo:20230327", "question_id": 61, "text": "Here's a sample program that demonstrates your requirements:\n\n```c++\n#include \n#include \n#include \n#include \n#include \n\nusing namespace std;\n\nint count_occurrences(const string &filename, const string &search_word) {\n int count = 0;\n ifstream fin(filename.c_str());\n if (fin.fail()) {\n cerr << \"Failed to open file \" << filename << endl;\n return 0;\n }\n string line;\n while (getline(fin, line)) {\n transform(line.begin(), line.end(), line.begin(), [](unsigned char c){ return tolower(c); }); // convert to lowercase\n stringstream s(line);\n string word;\n while (s >> word) {\n if (word == search_word) {\n count++;\n }\n }\n }\n fin.close();\n return count;\n}\n\nint main() {\n string filename = \"sample.txt\";\n string search_word = \"hello\";\n int count = count_occurrences(filename, search_word);\n cout << \"The word \\\"\" << search_word << \"\\\" occurs \" << count << \" times in file \" << filename << endl;\n return 0;\n}\n```\n\nHere's how it works:\n\n1. 
We define a function called `count_occurrences()` that takes two arguments: `filename` and `search_word`.\n2. Inside the `count_occurrences()` function, we first open the file using `ifstream`.\n\n3. If the file fails to open, we simply return 0.\n\n4. We then loop through each line of the file using `getline()` and store it in the variable `line`.\n\n5. We convert each line to lowercase using `transform()` and a lambda function.\n\n6. We then create a `stringstream` object called `s` and pass the line to it.\n\n7. We then loop through each word in the line using `>>` and store it in the variable `word`.\n\n8. If the `word` matches the `search_word`, we increment the `count` variable.\n\n9. After the loop is complete, we close the file using `close()` and return the `count`.\n\n10. In the `main()` function, we call the `count_occurrences()` function with the file name and search word.\n\n11. We then output the result to the console.\n\nI hope this helps! Let me know if you have any further questions.", "metadata": {}} -{"answer_id": "jWyN8NTdVix6CUoqfbRqVx", "model_id": "gpt-3.5-turbo:20230327", "question_id": 62, "text": "Sure! Here's a Python implementation using dynamic programming.\n\n```python\ndef lcs(str1, str2):\n # Initialize a 2D array to store the lengths of LCS\n m = [[0] * (len(str2) + 1) for i in range(len(str1) + 1)]\n \n # Fill the 2D array by comparing characters of both strings\n for i in range(1, len(str1) + 1):\n for j in range(1, len(str2) + 1):\n if str1[i-1] == str2[j-1]:\n m[i][j] = m[i-1][j-1] + 1\n else:\n m[i][j] = max(m[i-1][j], m[i][j-1])\n \n # Construct the LCS by backtracking the 2D array\n lcs = \"\"\n i, j = len(str1), len(str2)\n while i > 0 and j > 0:\n if str1[i-1] == str2[j-1]:\n lcs = str1[i-1] + lcs\n i -= 1\n j -= 1\n elif m[i-1][j] > m[i][j-1]:\n i -= 1\n else:\n j -= 1\n \n return lcs\n```\n\nThis function takes two input strings as parameters and returns the longest common subsequence as output. It first initializes a 2D array 'm' with all elements set to zero. This array is used to store the lengths of the longest common subsequences of all possible substrings of the input strings. The function then fills up the 2D array by comparing characters of both strings.\n\nAfter the 2D array is filled, the function backtracks it to construct the longest common subsequence. It starts from the bottom right corner of the array and moves up and left until it reaches the top left corner. During this process, it compares the current character of both strings and moves to the next cell in a way that maximizes the length of the LCS. Once the top left corner is reached, the constructed subsequence is returned as output.", "metadata": {}} -{"answer_id": "mx9G7gfKTCXCmNRaiMZQr9", "model_id": "gpt-3.5-turbo:20230327", "question_id": 63, "text": "Here's a regular expression pattern in Python that can be used to validate an email address:\n\n```\nimport re\n\npattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$'\n\ndef validate_email(email):\n if re.match(pattern, email):\n print(\"Valid email address\")\n else:\n print(\"Invalid email address\")\n \nvalidate_email(\"example@example.com\")\n```\n\nThis regular expression pattern checks if the email address has a valid format. It ensures that the email address starts with one or more alphanumeric characters, dots, underscores, percentage signs, plus or minus signs. 
The \"@' character follows this which is then followed by a valid domain name that contains only alphanumeric characters, dots, and hyphens, ending with a valid top-level domain name that contains only alphabetical characters with at least two characters.", "metadata": {}} -{"answer_id": "8LjpXk6Va5L2FBEwTU9YJ6", "model_id": "gpt-3.5-turbo:20230327", "question_id": 64, "text": "Certainly! Here's an example Python program using dynamic programming to find the nth Fibonacci number:\n\n```\ndef fibonacci(n):\n fib = [0, 1]\n for i in range(2, n + 1):\n fib.append(fib[i - 1] + fib[i - 2])\n return fib[n]\n\n# Example usage:\nprint(fibonacci(5)) # 5\nprint(fibonacci(10)) # 55\n```\n\nThe function `fibonacci(n)` takes an integer argument `n` and returns the nth Fibonacci number. The program uses dynamic programming by keeping track of the previous two Fibonacci numbers in a list called `fib`.\n\nThe program initializes `fib` to `[0, 1]`, which are the first two Fibonacci numbers. Then, it iterates from 2 to `n` and calculates each Fibonacci number as the sum of the previous two numbers. Finally, the function returns the nth Fibonacci number in the list `fib`.\n\nHope this helps! Let me know if you have any questions.", "metadata": {}} -{"answer_id": "BLC8fhAUeahFfrjBRiL9tx", "model_id": "gpt-3.5-turbo:20230327", "question_id": 65, "text": "Sure! Here is an implementation of the binary search algorithm in Python:\n\n```\ndef binary_search(arr, target):\n left = 0\n right = len(arr) - 1\n while left <= right:\n mid = (left + right) // 2\n if arr[mid] == target:\n return mid\n elif arr[mid] < target:\n left = mid + 1\n else:\n right = mid - 1\n return -1\n```\n\nTo use this function, pass in a sorted array and the target element you are searching for. The function returns the index of the target element in the array (if it exists), or -1 if it does not. 
\n\nHere is an example usage:\n\n```\narr = [1, 2, 3, 4, 5, 6, 7, 8, 9]\ntarget = 5\n\nindex = binary_search(arr, target)\n\nif index != -1:\n print(f\"Found {target} at index {index}\")\nelse:\n print(f\"{target} not found in the array\")\n```\n\nThis would output \"Found 5 at index 4\".", "metadata": {}} -{"answer_id": "NAH245JGvMWF24FWZJagYp", "model_id": "gpt-3.5-turbo:20230327", "question_id": 66, "text": "Here's an example implementation of a queue using two stacks in Python:\n\n```python\nclass QueueUsingStacks:\n def __init__(self):\n self.stack1 = []\n self.stack2 = []\n\n def enqueue(self, value):\n # Push the element onto the first stack\n self.stack1.append(value)\n\n def dequeue(self):\n if not self.stack2:\n # If the second stack is empty, transfer the elements from the first stack to the second stack\n while self.stack1:\n self.stack2.append(self.stack1.pop())\n\n if not self.stack2:\n # If both stacks are empty, return None\n return None\n else:\n # Pop the top element from the second stack and return it\n return self.stack2.pop()\n\n def is_empty(self):\n return not bool(self.stack1 or self.stack2)\n\n def size(self):\n return len(self.stack1) + len(self.stack2)\n```\n\nTo use this implementation, simply create a new instance of the `QueueUsingStacks` class and call the appropriate methods as shown below:\n\n```python\n# Create a new queue using two stacks\nq = QueueUsingStacks()\n\n# Enqueue some elements onto the queue\nq.enqueue(10)\nq.enqueue(20)\nq.enqueue(30)\n\n# Dequeue an element from the queue\nx = q.dequeue()\nprint(x) # Output: 10\n\n# Check if the queue is empty\nprint(q.is_empty()) # Output: False\n\n# Get the size of the queue\nprint(q.size()) # Output: 2\n```", "metadata": {}} -{"answer_id": "NKXTwB3WtfFcmQZbTxBP5o", "model_id": "gpt-3.5-turbo:20230327", "question_id": 67, "text": "Here's a code snippet that can find the common elements in two arrays:\n\n```python\ndef find_common_elements(arr1, arr2):\n common = []\n for i in range(len(arr1)):\n for j in range(len(arr2)):\n if arr1[i] == arr2[j]:\n if arr1[i] not in common:\n common.append(arr1[i])\n return common\n```\n\nIn this code, we use two nested loops to iterate over both arrays and compare their elements. If a common element is found, we add it to a list called 'common'. 
To avoid duplicates, we check whether the element is already in the 'common' list before appending it.\n\nWe can test this function with some example arrays:\n\n```python\narr1 = [1, 2, 3, 4, 5]\narr2 = [3, 4, 5, 6, 7]\nresult = find_common_elements(arr1, arr2)\nprint(result) # Output: [3, 4, 5]\n```\n\nIn this example, the common elements are 3, 4, and 5.", "metadata": {}} -{"answer_id": "5P7wPyrNwBrHNdJNrETS5h", "model_id": "gpt-3.5-turbo:20230327", "question_id": 68, "text": "To find the value of f(2), we simply substitute 2 in place of x in the given equation of f(x):\n\nf(2) = 5(2)^3 - 2(2) + 3\n\nf(2) = 5(8) - 4 + 3\n\nf(2) = 40 - 1\n\nTherefore, the value of f(2) is 39.", "metadata": {}} -{"answer_id": "UmnL4WoucBduFpX4jptkNU", "model_id": "gpt-3.5-turbo:20230327", "question_id": 69, "text": "3x + 10 = 5(x - 2)\n\n3x + 10 = 5x - 10 (use the distributive property)\n\n10 + 10 = 5x - 3x (subtract 3x from both sides)\n\n20 = 2x (simplify)\n\nx = 10 (divide both sides by 2) \n\nTherefore, x = 10.", "metadata": {}} -{"answer_id": "Hymm673GkY5tXNaTFeNUBZ", "model_id": "gpt-3.5-turbo:20230327", "question_id": 70, "text": "To find the length of a line segment with two endpoints (x1, y1) and (x2, y2), we use the distance formula:\n\ndistance = sqrt((x2 - x1)^2 + (y2 - y1)^2)\n\nPlugging in the coordinates of the given endpoints, we get:\n\ndistance = sqrt((10 - 2)^2 + (4 - (-2))^2)\ndistance = sqrt(8^2 + 6^2)\ndistance = sqrt(64 + 36)\ndistance = sqrt(100)\ndistance = 10\n\nTherefore, the length of the line segment is 10 units.", "metadata": {}} -{"answer_id": "PUzddJ35E3WsM7BDQ9E59i", "model_id": "gpt-3.5-turbo:20230327", "question_id": 71, "text": "Sure, I can help you write a formal email to a potential business partner proposing a joint venture. Here's a sample email you can use as a guide:\n\nDear [Name],\n\nI hope this email finds you well. I am writing to you today to propose a joint venture that I believe could be mutually beneficial for our businesses.\n\nAs you may know, our companies share many complementary strengths and resources that could be leveraged to achieve greater success in our respective markets. I believe that by working together, we could create a competitive advantage that would benefit both of us.\n\nI would like to propose that we explore the possibility of forming a joint venture in [industry or field], with the aim of combining our expertise and resources to create something innovative and valuable for the market.\n\nI believe that our companies' experiences and capabilities could form a strong foundation for a mutually beneficial partnership. If you are interested in pursuing this proposal, please let me know and we can discuss the details further.\n\nThank you for your time and consideration, and I look forward to hearing back from you.\n\nBest regards,\n\n[Your Name]", "metadata": {}} -{"answer_id": "6Q72hZCtDkgq379yhdmN5N", "model_id": "gpt-3.5-turbo:20230327", "question_id": 72, "text": "Dear [Employer\u2019s Name],\n\nThis letter serves as my formal resignation from my position as [Position Name] at [Company Name]. My last day of work will be on [Date].\n\nI would like to express my gratitude for the opportunities provided during my time at [Company Name]. It has been a pleasure to be part of such a dynamic and forward-thinking organization. My tenure here has contributed significantly to my professional growth, and it is with a heavy heart that I must resign.\n\nI appreciate your constant support, guidance and collaboration throughout my employment here. 
I have learned a lot under your leadership and found my experience here to be fulfilling in ways that I could not have anticipated.\n\nIf there is anything that I can do to ensure a seamless transition for my successor and facilitate the smooth conclusion of my work, please don't hesitate to let me know. I am happy to complete any outstanding tasks or projects before I leave.\n\nThank you again for all of your support and understanding. I wish the company continued success, and I hope that our paths may cross again in the future.\n\nSincerely,\n\n[Your Name]", "metadata": {}} -{"answer_id": "ReXnHy9C8SwcYPAep6gvJg", "model_id": "gpt-3.5-turbo:20230327", "question_id": 73, "text": "[Your Name]\n[Your Designation]\n[Your Institution]\n[Your Address]\n[City, State Zip Code]\n[Your Email]\n[Date]\n\n[Admission Committee]\n[Graduate Program in Computer Science]\n[University Name]\n[Address]\n[City, State ZIP Code]\n\nDear Members of the Admission Committee,\n\nI am writing to recommend [Student's Name] for admission to the graduate program in Computer Science at your esteemed institution. I have had the privilege to teach [him/her] in several courses during [his/her] undergraduate studies in [program name] at [university name], and I highly recommend [him/her] for [his/her] outstanding academic performance and natural ability in the field of computer science.\n\nDuring [his/her] time at [university name], [Student's Name] proved to be a highly motivated and bright student, consistently demonstrating academic excellence in all courses. [He/She] consistently achieved top grades and took on challenging coursework with enthusiasm and diligence. [His/Her] strong analytical and critical thinking skills, coupled with [his/her] passion for technology and computer science, make [him/her] a perfect candidate for the esteemed computer science program at your institution.\n\n[Student's Name] was not only a dedicated student but also an active participant in many extra-curricular activities, such as [list activities]. [He/She] was always eager to explore new concepts and technologies beyond the coursework requirements, and [his/her] enthusiasm to learn and work in a team was evident in [his/her] participation in group projects and research work.\n\nIn conclusion, I have a strong belief that [Student's name] has an incredible potential for achieving academic excellence in your prestigious program. [His/Her] exceptional academic record, strong work ethic, and impressive achievements make [him/her] an ideal candidate for the graduate program in Computer Science at your institution. I, therefore, strongly recommend [Student's Name] for admission to the program and look forward to seeing [him/her] contributing to the field of computer science significantly.\n\nPlease do not hesitate to contact me if you require any further information regarding this recommendation.\n\nSincerely,\n\n[Your Name]", "metadata": {}} -{"answer_id": "cKk5zZe8yYY4JH3kr5pGXG", "model_id": "gpt-3.5-turbo:20230327", "question_id": 74, "text": "Subject Line: Introducing our revolutionary new software solution\n\nDear valued customers,\n\nWe are thrilled to announce the launch of our latest innovation \u2013 a pioneering software solution that will revolutionize the way you do business. 
Designed with cutting-edge technology and user-friendly features, our software solution will help you streamline your workflows, increase efficiency, and ultimately boost your bottom line.\n\nHere are just a few of the ways our software solution can transform your business:\n\n- Increased collaboration: Our software solution makes it easy for your team to collaborate on tasks and projects, ensuring everyone is on the same page and nothing falls through the cracks.\n\n- Real-time tracking: With our software solution, you can track progress in real time, enabling you to make informed decisions quickly and efficiently.\n\n- Comprehensive reporting: Our software solution allows you to run detailed reports that provide valuable insights into your business operations, giving you the information you need to make informed decisions and identify areas of improvement.\n\nThese are just a handful of the benefits you can expect from our new software solution. We truly believe that our product will be a game changer for businesses of all sizes and industries.\n\nTo celebrate our launch, we are offering a special introductory offer exclusively for our customers. For a limited time, you can get access to our software solution at an incredible discounted price. Don\u2019t miss out on this amazing opportunity to take your business to the next level!\n\nTo learn more about our software solution and to take advantage of our special offer, simply click the link below:\n\n[Insert Link]\n\nThank you for choosing us as your partner in business. We look forward to helping you succeed with our new software solution.\n\nBest regards,\n\n[Your Name]", "metadata": {}} -{"answer_id": "c5rwA3cPjytSGcn7H8dZ6Q", "model_id": "gpt-3.5-turbo:20230327", "question_id": 75, "text": "Subject: Apology for Delay in Your Order\n\nDear [Customer Name],\n\nWe are deeply sorry to inform you that there was a delay in processing your recent order. We understand how frustrating and inconvenient it can be to wait for something that you were looking forward to receiving.\n\nWe want to express our sincere apologies and assure you that we have taken necessary steps to resolve this issue. Due to unexpected high volume of orders last week, our warehouse faced some delays in processing orders on time, and unfortunately, your order was one of the affected ones.\n\nWe have been working diligently to ensure that your order is processed and shipped as soon as possible. We have put measures in place to prevent the recurrence of any such delay in future by hiring and re-training more staff and improving our inventory management.\n\nOnce again, please accept our apologies for the inconvenience this has caused you. We value your patronage and look forward to serving you with the same level of excellence that you have come to expect from us.\n\nThank you for your patience and understanding.\n\nBest regards,\n\n[Your Name]\n[Your Store Name]", "metadata": {}} -{"answer_id": "XZGPtBo86KfF9REZ36s2X5", "model_id": "gpt-3.5-turbo:20230327", "question_id": 76, "text": "Title: The Evolution and Cultural Significance of Jazz\n\nOpening shot of jazz musicians performing in a club\n\nNarration: Jazz has been regarded as one of the most significant cultural contributions from the African American community to the world. 
It has evolved over time, from its origins in New Orleans to becoming a global phenomenon.\n\nCut to a black and white photo of the original jazz musicians in New Orleans\n\nNarration: Jazz originated in the late 19th and early 20th centuries in New Orleans. It was a musical genre that blended together elements of African rhythms, blues, ragtime, and European harmonies.\n\nCut to a video of Louis Armstrong\n\nNarration: Jazz really grew in popularity in the 1920s and 1930s, particularly with the emergence of jazz legends like Louis Armstrong and Duke Ellington. Jazz was unique because it allowed for improvisation and the individual expression of musicians.\n\nCut to a video of a modern jazz ensemble\n\nNarration: Today, jazz continues to evolve in unique and exciting ways. It is celebrated as a reflection of cultural identity and a bridge between different communities.\n\nCut to a jazz festival scene\n\nNarration: Jazz festivals around the world attract thousands of music lovers annually, from the Newport Jazz Festival in America to the Montreux Jazz Festival in Switzerland.\n\nCut to a jazz club scene\n\nNarration: For many, jazz clubs remain important cultural spaces where people connect and enjoy live performances.\n\nClosing shot of jazz musicians performing\n\nNarration: Jazz music remains a constant reminder of the versatility and beauty of human creativity. Let us celebrate and continue to support the evolution of jazz for generations to come.\n\nEnd with a jazz score and video credits", "metadata": {}} -{"answer_id": "DRncHCsdGji756efDhacUT", "model_id": "gpt-3.5-turbo:20230327", "question_id": 77, "text": "Aloha, fellow travelers! I recently had the pleasure of visiting the beautiful island of Hawaii, and it was truly an enchanting experience that left me with unforgettable memories.\n\nOne of the things that stood out to me was the rich culture of Hawaii. We got to witness an authentic hula performance right on the beach, complete with traditional costumes and music. The dancers\u2019 graceful movements were mesmerizing, and it was fascinating to learn about the history and meaning behind each dance.\n\nAnother cultural highlight for us was attending a luau. It was a true feast for the senses, with an abundance of delicious food, music, and dance performances. We got to try traditional Hawaiian dishes like kalua pig and poi, and we even learned some moves during a group hula lesson.\n\nOf course, no trip to Hawaii would be complete without experiencing its breathtaking natural beauty. We hiked to the top of a volcanic crater in Maui, and the view from above was simply stunning. The colors of the rocks and the ocean were like nothing we had ever seen before. We also visited Hawaii Volcanoes National Park on the Big Island, which was equally jaw-dropping. Seeing the active lava flow and the steam coming out of the ground made us feel like we were on another planet.\n\nAnother must-see attraction in Hawaii is Waikiki Beach. The iconic beach is famous for its crystal clear waters and white sand, surrounded by towering hotels and restaurants. We spent a morning there, soaking up the sunshine and enjoying the soothing sound of the waves.\n\nOverall, Hawaii truly exceeded our expectations. The combination of stunning natural beauty and rich cultural experiences made for a perfect vacation. We left feeling refreshed and inspired, already dreaming of our next trip back to this tropical paradise. 
Mahalo for reading, and until next time!", "metadata": {}} -{"answer_id": "Y5rCQHHDA6WNfhRcB6QboG", "model_id": "gpt-3.5-turbo:20230327", "question_id": 78, "text": "If you're looking for an exciting and visually stunning science fiction adventure, look no further than the recently released film \"Space Odyssey.\" This film is an absolute triumph, featuring incredible special effects and a compelling storyline that will keep you on the edge of your seat.\n\nThe plot of \"Space Odyssey\" revolves around a team of astronauts who embark on a perilous mission to explore a distant planet. As they venture further and further into the unknown, they encounter all manner of dangers and obstacles, ranging from treacherous alien creatures to hostile sentient machines. Along the way, they must grapple with their own fears and inner demons, as well as the challenges of working together as a team.\n\nOne of the standout features of \"Space Odyssey\" is its cast of characters. Each member of the astronaut team is well-developed and brings their own unique perspective and personality to the story. From the stoic and disciplined captain to the brash and impulsive engineer, every character feels fully realized and relatable. As the story progresses, we see these characters grow and evolve, learning from their mistakes and coming together to face their challenges as a team.\n\nOf course, no discussion of \"Space Odyssey\" would be complete without mentioning its incredible special effects. From the stunningly rendered alien landscapes to the dizzying action scenes, every frame of this film is a visual spectacle. The attention to detail and overall sense of immersion is truly breathtaking, making you feel like you're right there alongside the astronauts as they explore the unknown.\n\nOverall, \"Space Odyssey\" is a must-see for any science fiction fan. It's a thrilling and beautifully crafted film that will keep you engaged from start to finish. With its gripping plot, well-drawn characters, and jaw-dropping special effects, it's easily one of the best sci-fi films of the year. 
Go see it, and prepare to be transported to another world.", "metadata": {}} -{"answer_id": "Lea4wh5n6GsEkBnKsntN64", "model_id": "gpt-3.5-turbo:20230327", "question_id": 79, "text": "Introduction\n- Welcome listeners and introduce the topic of the podcast, the influence of streaming platforms on the music industry.\n- Briefly introduce yourself and your background and why you chose this topic.\n\nSection 1: Overview of the Music Industry\n- Provide an overview of the music industry before the rise of streaming services.\n- Discuss how record labels, physical album sales, and radio play were the primary distribution channels for music.\n\nSection 2: Streaming Platforms\n- Discuss the rise of streaming platforms like Spotify, Apple Music, and Tidal.\n- Explain how these platforms have disrupted the traditional distribution model of the music industry.\n- Discuss the benefits and disadvantages of artists distributing their music through these platforms.\n\nSection 3: Impact on Artists\n- Examine how streaming services have changed the way artists make money from their music.\n- Discuss how the shift from album sales to streaming has impacted the revenue generated by artists.\n- Discuss the challenges artists face in terms of getting their music noticed within the abundance of material on streaming platforms.\n\nSection 4: Impact on the Music Industry\n- Discuss how the rise of streaming platforms have impacted the music industry as a whole.\n- Explain how labels have had to adapt their business models in order to continue making profits.\n- Discuss how some artists have shifted their focus from album sales to live performances and touring.\n\nSection 5: The Future of Music Streaming\n- Discuss the potential future of music streaming platforms and their impact on the music industry.\n- Discuss whether or not the rise of streaming has been positive or negative for creatives in the music industry.\n- Provide some concluding thoughts and encourage listeners to share their thoughts on social media.\n\nConclusion\n- Recap the key points covered in the podcast episode.\n- Encourage listeners to continue engaging with the topic and to tune in for future episodes.", "metadata": {}} -{"answer_id": "gdLxzcypTeuD6ToC6HWnXh", "model_id": "gpt-3.5-turbo:20230327", "question_id": 80, "text": "I'm happy to assist you in writing a symphony concert review! Here's an example review:\n\nThe recently performed symphony concert was an outstanding showcase of musical talent, leaving the audience spellbound with the performances of the orchestra. As the concert progressed, the orchestra's unmatchable skill and talent on their respective instruments were vividly evident. The pieces featured in the concert were a delightful mix of musical genres that captivated the audience throughout the performance.\n\nThe meticulous attention to detail by the orchestra was surely commendable, as the notes flowed from instrument to instrument in perfect harmony. The way each musician played their part, with utmost precision and accuracy, was a true testament to the hours of practice and effort invested in crafting each piece. The orchestra's enthusiasm, passion, and commitment to their craft were transparent in every note that they played. \n\nOne can only appreciate the level of expertise that the orchestra displayed and the amount of dedication and discipline that has gone into cultivating their skill. 
The open communication between the conductor and the members of the orchestra was an impressive sight, and it undoubtedly contributed to the flawless execution of the musical compositions.\n\nAs the concert progressed, it was also noticeable that the entire audience became engulfed in the music, swaying to the rhythm of the notes. It was a sight to behold, and the applauds after every piece were jaw-dropping. There was no denying that the audience was immersively engaged, enthralled by the music, and the orchestra's fantastic performance.\n\nIn conclusion, the symphony concert showcased remarkable performances by the orchestra, with the audience's captivation evident in every note that was played. The concert was a perfect reminder of the beauty of classical music and how talented musicians can enchant an audience with sheer musical perfection. It was a night to remember, and it will stay deeply etched in the memory of everyone who had the privilege of attending this fantastic symphony concert.", "metadata": {}} diff --git a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_llama-13b.jsonl b/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_llama-13b.jsonl deleted file mode 100644 index 2f424d7b1..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_llama-13b.jsonl +++ /dev/null @@ -1,80 +0,0 @@ -{"answer_id": "J3UA6eGXGyFeUGqGpP3g34", "model_id": "llama-13b:v1", "question_id": 1, "text": "The following are some strategies that may help you improve your time management skills:\n1. Make a list of priorities.\n2. Create a detailed schedule that includes time for regular tasks, meetings, breaks, and appointments.\n3. Set specific goals.\n4. Use a calendar or other scheduling software.\n5. Develop an agenda for each meeting to ensure that it stays on track.\n6. Stick to your schedule and don't get distracted.\n7. Make use of your free time.\n8. Make time for exercise, socializing, and relaxing.\n9. Keep a log of your time usage.\n10. Don't get overwhelmed by what you have to do.\n11. Take care of your health.\n12. Manage stress.\n13. Get enough sleep.\n14. Strike a balance.\n15. Always be prepared.\n16. Give yourself rewards.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "jZVDNTtFnNNd6vgkGjzAGa", "model_id": "llama-13b:v1", "question_id": 2, "text": "People experience stress from time to time, and it can be caused by a wide range of factors, including work, family, friends, money, and health. Stress can be a motivating factor, but it can also be harmful. Here are some effective ways to deal with stress:\n1. Taking time to relax. It's important to take time for yourself to relax and reduce your stress. Doing so doesn't have to be expensive or time-consuming. It could simply be reading a book, taking a walk, or listening to music.\n2. Prioritizing your responsibilities. It's important to prioritize your responsibilities and focus on the things that are most important. You should also be able to say no to tasks that you don't have time for.\n3. Getting enough sleep. Getting enough sleep can be very helpful for dealing with stress. If you're not getting enough sleep, try to go to bed earlier and develop a routine.\n4. Eating a healthy diet. It's important to eat a healthy diet, which can help reduce stress and make you feel better about yourself.\n5. Exercising. Exercising is a great way to relieve stress. 
It can also help reduce the harmful effects of stress and improve your overall health.\n6. Learning how to manage your time. Learning how to manage your time can help you avoid unnecessary stress. You should also try to set realistic goals and not take on too much at once.\n7. Improving your social network. Having a good social network can be very helpful for dealing with stress. You should try to spend time with people that you trust and confide in.\n8. Developing a support network. A support network is a group of people who can offer you help, guidance, and support. They can be friends, family members, or even a professional therapist.\n9. Getting professional help. If you're struggling to deal with stress, it can be beneficial to get professional help. A therapist or counselor can help you to understand your stress better and develop strategies for dealing with it.\n10. Talking to someone. You can also talk to someone about how you feel and ask for their advice. It can be helpful to get a fresh perspective from someone and let things off your chest.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "NbSxRXf8Yuvjr7HC6AvrPw", "model_id": "llama-13b:v1", "question_id": 3, "text": "Main differences between Python and JavaScript are:\n1. Python is an object-oriented programming language. JavaScript is a scripting language.\n2. Python is a compiled language. JavaScript is an interpreted language.\n3. Python is a general purpose programming language. JavaScript is a special purpose programming language.\n4. Python runs on the JVM in Java Applets. JavaScript runs on the browsers.\n5. Python is a powerful programming language. JavaScript is a simple programming language.\n6. Python is a good language for web development. JavaScript is a good scripting language.\n7. Python is a high level programming language. JavaScript is a low level programming language.\n8. Python is a dynamic programming language. JavaScript is a static programming language.\n9. Python is a strict programming language. JavaScript is a relaxed programming language.\n10. Python has a large standard library. JavaScript has a small standard library.\n11. Python is very stable. JavaScript has many releases.\n12. Python is a simple language. JavaScript is a complex language.\n13. Python is easy to learn. JavaScript is hard to learn.\n14. Python is a secure programming language. JavaScript is not a secure programming language.\n15. Python is a multi paradigm language. JavaScript is a single paradigm language.\n16. Python is good for data science. JavaScript is not good for data science.\n17. Python is a general purpose programming language. JavaScript is a special purpose programming language.\n18. Python is a programming language. JavaScript is a scripting language.\n19. Python is a compiled programming language. JavaScript is an interpreted programming language.\n20. Python is a strongly typed programming language. JavaScript is a weakly typed programming language.\n21. Python supports object-oriented programming. JavaScript supports prototype-based programming.\n22. Python supports type inheritance. JavaScript supports type prototyping.\n23. Python supports operator overloading. JavaScript does not support operator overloading.\n24. Python comes with a large standard library. JavaScript comes with a small standard library.\n25. Python is a very stable programming language. JavaScript is a simple programming language.\n26. 
Python is a powerful programming language. JavaScript is a simple programming language.\n27. Python is a high level programming language. JavaScript is a low level programming language.\n28. Python is a dynamic programming language. JavaScript is a static programming language.\n29. Python has a very active community. JavaScript has a small community.\n30. Python is a general purpose programming language. JavaScript is a special purpose programming language.\n31. Python is a multi-paradigm language. JavaScript is a single paradigm language.\n32. Python is a secure programming language. JavaScript is not a secure programming language.\n33. Python is a general purpose programming language. JavaScript is a simple programming language.\n34. Python is a scripting language. JavaScript is a compiled language.\n35. Python is a compiled language. JavaScript is a scripting language.\n36. Python is a general purpose programming language. JavaScript is a web scripting language.\n37. Python has a large standard library. JavaScript has a small standard library.\n38. Python is a very stable programming language. JavaScript is a very dynamic programming language.\n39. Python is a high level programming language. JavaScript is a low level programming language.\n40. Python is easy to learn. JavaScript is hard to learn.\n41. Python is a general purpose programming language. JavaScript is a simple programming language.\n42. Python is a multi-paradigm programming language. JavaScript is a single paradigm language.\n43. Python is a compiled language. JavaScript is an interpreted language.\n44. Python is a dynamic programming language. JavaScript is a static language.\n45. Python is a general purpose programming language. JavaScript is a web scripting language.\n46. Python is a secure programming language. JavaScript is not a secure programming language.\n47. Python is a general purpose programming language. JavaScript is a static programming language.\n48. Python is a general purpose programming language. JavaScript is a weakly typed programming language.\n49. Python is a general purpose programming language. JavaScript is a dynamic programming language.\n50. Python is a general purpose programming language. JavaScript is a multi paradigm language.\n51. Python is a general purpose programming language. JavaScript is a simple programming language.\n52. Python is a general purpose programming language. JavaScript is a scripting language.\n53. Python is a general purpose programming language. JavaScript is a special purpose programming language.\n54. Python is a multi-paradigm programming language. JavaScript is a", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "E9zrPDdncYvAebEaxiPXfx", "model_id": "llama-13b:v1", "question_id": 4, "text": "Here are some ideas to help you become more productive when working from home:\n1. Set up a proper workspace: It's important to have a separate space for working, so that you don't feel tempted to lounge around or do other things when you should be working.\n2. Block out distractions: It's important to focus on your work and not get distracted. You can block out distractions by turning off your phone, closing your door, or working in a separate room away from household noises.\n3. Take breaks: It's important to take breaks and get a change of scenery during the day. This can help you be more productive and improve your concentration.\n4. 
Avoid multitasking: You should avoid trying to do multiple tasks at the same time, as this can make you less productive and reduce your overall quality of work.\n5. Have an organized schedule: Having a clear schedule can help you stay on track and be more productive. This can include things such as setting specific working hours, creating a to-do list, and scheduling meetings with colleagues.\n6. Set goals: You should set specific goals for yourself to help you stay motivated and on track. This can include things such as writing an article for a certain time period and meeting a certain word count.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "36TYpADDHAfRvtvKitfSXR", "model_id": "llama-13b:v1", "question_id": 5, "text": "Quantum computing is a type of computing that uses quantum-mechanical phenomena, such as superposition and entanglement, in its operation. In simple terms, a quantum bit or qubit is used to represent information, and a number of qubits can be used to represent a classical bit. A quantum computer can do things that a classical computer cannot do, such as factoring a large number more efficiently.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "8vtLLYRM352mZ8PXXbwzBT", "model_id": "llama-13b:v1", "question_id": 6, "text": "The number one difference between plant-based and animal-based protein sources is that animal-based sources are complete protein sources, while plant-based sources are not. Animal-based sources have all of the essential amino acids, while plant-based sources do not. Animal-based protein sources include meat, poultry, eggs, milk, and cheese. Plant-based protein sources include legumes, seeds, and nuts.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "mrCniyWYvAtxWhgMdXq2BG", "model_id": "llama-13b:v1", "question_id": 7, "text": "Critical thinking is a vital skill to have, but it can be difficult to develop. Here are some ways you can start developing your critical thinking skills:\n1. Analyze and interpret: When you're presented with information, try to analyze and interpret it on your own terms. When you start to analyze and interpret information on your own, it will become easier to do so over time.\n2. Assess your information: When you receive new information, try to assess whether it's valid or reliable. This is important for evaluating and forming your own opinions.\n3. Use your intuition: Use your intuition to complement your critical thinking skills. When you pay attention to your gut feelings, they'll usually tell you when you need to double-check your information.\n4. Learn from your mistakes: When you make a mistake, try not to beat yourself up about it. Instead, learn from your mistakes and use them to your advantage.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "S8fmdbvpvbgTUyDU9V3M3N", "model_id": "llama-13b:v1", "question_id": 8, "text": "One of the major challenges faced by the education sector today is the lack of skilled teachers. 
Finding enough qualified teachers to work in the education sector, and in the numbers that are required, is one of the biggest problems facing the education sector, largely due to the lack of funding for teacher training and the low salaries that teachers receive.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "KmuNjvNKRyaFwaBZTLArcG", "model_id": "llama-13b:v1", "question_id": 9, "text": "Consumer behavior can be influenced by a variety of factors, including:\n1. Psychological factors, such as perception, beliefs, attitudes, personality, motivation, emotion, and social influences;\n2. Social factors, such as family, friends, and reference groups;\n3. Cultural factors, such as reference groups, family, and cultural roles;\n4. Demographic factors, such as age, gender, occupation, income, and education level;\n5. Economic factors, such as income, wealth, and spending patterns;\n6. Personal factors, such as lifestyle, job, environment, and individual traits;\n7. Situational factors, such as location, time, and situation; and\n8. Market factors, such as competition, prices, and promotional activity.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "HNNT9k5htiMeRwNSD6dcz4", "model_id": "llama-13b:v1", "question_id": 10, "text": "Conflict resolution strategies in the workplace can range from simple to more involved, depending on the situation:\n1. Avoidance: Avoiding the issue can be an effective strategy in the short term, but it can lead to more tension and conflict in the long term.\n2. Compromise: Compromise can be a good strategy for solving an issue, but it can require a give-and-take and lead to the loss of respect between the parties involved.\n3. Negotiation: Negotiation is a good strategy for solving an issue when both parties are willing to work together and are open to the idea of changing their views and positions.\n4. Mediation: Mediation can be a good way to resolve an issue as it can be more impartial and less adversarial than other strategies.\n5. Collaboration: Collaboration can be a good strategy for solving an issue as it can help both parties to develop new perspectives and solutions.\n6. Formal resolution: Formal resolution can be a good strategy for resolving an issue, especially if the situation is particularly complex or difficult.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "ChXjhDDikxU9FV3CADs6Ym", "model_id": "llama-13b:v1", "question_id": 11, "text": "1. Single-use plastic bottles can take between 450 and 1,000 years to decompose, so they can cause serious environmental problems. Single-use plastic bottles can end up in the ocean, either as litter or as part of the Great Pacific Garbage Patch. 2. Single-use plastic bottles can be a big source of plastic debris in the ocean, which can be ingested by marine animals, killing them or causing them serious health problems. 3. In addition to polluting the environment, single-use plastic bottles can also be a serious health risk to humans. 
Single-use plastic bottles can pose a risk to human health because of the chemicals used in their manufacturing.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "5wsPnN3VmmSkahgugFNo7u", "model_id": "llama-13b:v1", "question_id": 12, "text": "A public transportation system should be designed to accommodate the needs of all riders, including people with disabilities. This requires planning and design that considers the needs and abilities of people with sensory, physical, developmental, and mental disabilities. The system should be accessible to and usable by people with disabilities, including those who use mobility devices, people who are visually impaired, and people who are deaf or hard-of-hearing. For example, the system should have accessible stops, routes, and vehicles, as well as ramps and elevators to help people board. It should also offer a wide range of accessible routes, including accessible routes to the transit center. It should also have reliable, consistent, and easy-to-use signage. The system should also provide reliable, easily accessible, and well-maintained facilities, such as restrooms and shelters. Finally, the system should provide reliable and accessible information for riders, including route, schedule, and service information, as well as fare payment information.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "NRGZGnU2sPN3ShMe9C3fMn", "model_id": "llama-13b:v1", "question_id": 13, "text": "There are two types of fiscal policy: expansionary and contractionary. Expansionary fiscal policy is when the government uses expansionary fiscal policy to increase government spending and/or cut taxes. Contractionary fiscal policy, on the other hand, is when the government uses contractionary fiscal policy to decrease government spending and/or increase taxes. Here are some examples of fiscal policies:\n1. Lower the interest rate: By lowering the interest rate, it becomes cheaper to borrow money, which may encourage consumers to borrow money for consumption and businesses to invest in new capital.\n2. Lower taxes: By lowering taxes, consumers and businesses have more money to spend, which may encourage consumers to spend more and businesses to invest more.\n3. Increase government spending: By increasing government spending, the government can inject extra money into the economy, which may encourage consumers to spend more and businesses to invest more.\n4. Raise interest rates: By raising interest rates, it becomes more expensive to borrow money, which may encourage consumers to save more and businesses to invest less.\n5. Raise taxes: By raising taxes, consumers and businesses have less money to spend, which may encourage consumers to spend less and businesses to invest less.\n6. Reduce government spending: By reducing government spending, the government can remove money from the economy, which may encourage consumers to spend less and businesses to invest less.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "inKimHkWsXShQBTRmxr5Yg", "model_id": "llama-13b:v1", "question_id": 14, "text": "Language and cultural barriers can affect the way people communicate and form relationships in multicultural societies by limiting an individual's ability to communicate with people of other cultures. 
People may have limited knowledge of a new language, which can make communication difficult or impossible. People may also have different cultural values, which can lead to misunderstandings and miscommunication.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "H8aKtWwf8m6Lgxc2YyR2yf", "model_id": "llama-13b:v1", "question_id": 15, "text": "Artificial intelligence allows computers to process information and make decisions without human input. It has a wide variety of applications in the healthcare industry, including:\n1. Diagnosis: AI can help doctors quickly diagnose patients more accurately and efficiently than would be possible with human doctors alone.\n2. Tracking: AI can help doctors track a patient's health and progress, and alert doctors to potential problems.\n3. Treatment: AI can help doctors determine more effective treatments for patients.\n4. Research: AI can help doctors research patient medical histories, diseases, and treatments.\n5. Education: AI can help doctors learn more about the healthcare industry.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "PafVwxMsjSkYUETiVBgxTU", "model_id": "llama-13b:v1", "question_id": 16, "text": "Gene editing is a powerful technology that allows scientists to precisely edit genetic sequences with a high degree of accuracy. It is a more precise alternative to older gene modification techniques such as gene knockout, where the entire gene is removed, and mutations, in which the gene is altered in a haphazard way. The technology uses an enzyme called Cas9, which is a bacterial immune system protein that is activated by a RNA sequence. Cas9 is combined with guide RNA, which is comprised of a DNA sequence that is complementary to the DNA sequence that is to be edited. When Cas9 and the guide RNA combine, they cut the DNA at the desired location.\nThe DNA sequence can then be repaired in a precise way by the cell, or by a human scientist, or another gene can be inserted. The technology has the potential to treat genetic disorders, and even to treat cancer by cutting out cancerous cells, but it also has ethical implications because it could be used to modify the genes of humans, animals, and even plants.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "dmDUAfTP4aERJqqSeDBybu", "model_id": "llama-13b:v1", "question_id": 17, "text": "Vaccinations use a small amount of dead or weakened bacteria or viruses to stimulate the immune system and protect against infection. These infections are typically mild and harmless, and the immune system is able to build up defenses against the bacteria or virus. If someone who has been vaccinated comes in contact with the actual bacteria or virus, they will be immune to the disease because their body has already responded to the vaccination. This is known as immunity.\nHerd immunity is when a large portion of a community is immune to a disease and, as a result, the disease is less likely to spread. 
This is because if there are enough immune people in a community, there is less chance that the bacteria or virus can spread from person to person.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "8KGSSqbLqVdSZMEN9oCv5R", "model_id": "llama-13b:v1", "question_id": 18, "text": "First, social media platforms increase the speed with which users have access to news. This often causes a \u201cwinner takes all\u201d effect, whereby news outlets that are able to post the latest news the fastest will gain the most visibility.\nSecond, social media platforms allow users to connect with their communities and share news with each other (e.g., Facebook, Twitter). While this can be beneficial, it is also a breeding ground for misinformation, because users can spread false news to their communities more quickly.\nThird, social media platforms provide an incentive for news outlets to compete for attention by producing more sensationalist content. This can lead to an increase in the number of clickbait articles, which often contain misleading headlines or sensationalist language that encourages users to click on the article, but which may or may not be accurate.\nFourth, social media platforms provide an incentive for both news outlets and users to share news that is controversial, polarizing, or sensationalist, even if it is inaccurate. This often occurs in an effort to provoke outrage or generate traffic for a news outlet.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "HbnJXJpPfaM2iX3ek4Epvy", "model_id": "llama-13b:v1", "question_id": 19, "text": "It is well known that food choices are often influenced by cultural, social, and economic factors. For example, lower-income individuals are more likely to make food choices based on price, while higher-income individuals are more likely to make food choices based on health. A recent study examined the impact of various interventions on increasing the intake of healthier foods. The interventions were:\n1. Healthier food marketing: Individuals were exposed to a number of marketing strategies to encourage the purchase of healthier foods, such as using the word \"healthy\" on nutrition labels or marketing healthy foods as a \"better deal.\"\n2. Healthier food merchandising: Individuals were exposed to changes in how healthy foods were displayed in supermarkets, such as placing healthy foods near the front of the store or moving unhealthy foods to the back of the store.\n3. Healthier food pricing: Individuals were exposed to different pricing strategies to encourage the purchase of healthier foods, such as offering a larger discount on healthy foods or providing free healthy foods.\n4. Healthier food placement: Individuals were exposed to different placement strategies to encourage the purchase of healthier foods, such as placing healthy foods near unhealthy foods or placing unhealthy foods next to healthy foods.\n5. Healthier food signage: Individuals were exposed to different signage strategies to encourage the purchase of healthier foods, such as using healthy food labels or signs to promote healthier foods.\n6. Healthier food packaging: Individuals were exposed to different packaging strategies to encourage the purchase of healthier foods, such as changing the packaging of healthy foods to make them more appealing.\n7. 
Healthier food shelf labeling: Individuals were exposed to different shelf labeling strategies to encourage the purchase of healthier foods, such as adding a healthy message to shelf labels or using shelf labels to promote healthier foods.\n8. Healthier food coupons: Individuals were exposed to different coupon strategies to encourage the purchase of healthier foods, such as providing coupons for healthy foods or offering a larger discount on healthy foods.\n9. Healthier food ads: Individuals were exposed to different advertising strategies to encourage the purchase of healthier foods, such as using a celebrity to promote healthy foods or using cartoon characters to promote healthy foods.\n10. Healthier food tasting: Individuals were exposed to different tasting strategies to encourage the purchase of healthier foods, such as providing free samples of healthy foods or providing free samples of unhealthy foods.\n11. Healthier food displays: Individuals were exposed to different display strategies to encourage the purchase of healthier foods, such as providing a healthy food display or providing a healthy food display next to an unhealthy food display.\n12. Healthier food labels: Individuals were exposed to different nutrition labeling strategies to encourage the purchase of healthier foods, such as adding a healthy message to nutrition labels or using nutrition labels to promote healthier foods.\n13. Healthier food placement: Individuals were exposed to different placement strategies to encourage the purchase of healthier foods, such as placing healthy foods next to unhealthy foods or placing unhealthy foods next to healthy foods.\n14. Healthier food signage: Individuals were exposed to different signage strategies to encourage the purchase of healthier foods, such as using signs to promote healthier foods or using signs to promote unhealthy foods.\n15. Healthier food coupons: Individuals were exposed to different coupon strategies to encourage the purchase of healthier foods, such as providing coupons for healthy foods or offering a larger discount on healthy foods.\n16. Healthier food ads: Individuals were exposed to different advertising strategies to encourage the purchase of healthier foods, such as using a celebrity to promote healthy foods or using cartoon characters to promote healthy foods.\n17. Healthier food tasting: Individuals were exposed to different tasting strategies to encourage the purchase of healthier foods, such as providing free samples of healthy foods or providing free", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "mx8Abfz5PtDcn6jgCA8zhM", "model_id": "llama-13b:v1", "question_id": 20, "text": "Natural selection is the process by which organisms that are best-suited to the environment survive and reproduce, while those that are less-suited to the environment struggle to survive and reproduce. Survival of the fittest is the principle of natural selection. Let's look at an example.\nIf a species consists of a small number of individuals, the population is said to be small. The population of this species is small. Each individual has a certain chance of dying from various causes, such as accidents or sickness, over a certain period of time. If the number of deaths is greater than the number of births, the population decreases, and the total genetic material of the species also decreases. 
This process is known as genetic drift.\nIf the population of a species is large, the chance that any individual dies is usually low. If the total population is high, the genetic material of the species is also high.\nImagine a species of plant that has a large population. The population is large. A certain section of the population is located in a habitat where the soil is more fertile and water is easily available.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "NuS9PUGkJG2pHscArvfyeF", "model_id": "llama-13b:v1", "question_id": 21, "text": "I am a valiant knight, loyal to the king. I will do my utmost to defend his honor and protect his country.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "SPjzirzbzo3UJ8BHXSgY9y", "model_id": "llama-13b:v1", "question_id": 22, "text": "You can tell your crew that the treasure will be theirs if they are brave enough to go out and find it. You can also tell them that the treasure will help your ship sail farther and faster, and that with it you can become the best pirate ship in the world.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "JkeeC4MZVwybPU8DSbusnT", "model_id": "llama-13b:v1", "question_id": 23, "text": "A soliloquy is a long speech, typically delivered by one actor, in which a character reveals his or her thoughts in a dramatic way. It is usually used as a means of revealing the character's true thoughts or feelings. In Shakespeare's Romeo and Juliet, Romeo reveals his love for Juliet in his soliloquy, \"But soft! What light through yonder window breaks? It is the east, and Juliet is the sun!\"", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "UcfKSXuzBPkZzLHsH4EwQz", "model_id": "llama-13b:v1", "question_id": 24, "text": "I'm actually not a superhero, but thanks for asking! I first came to life in the year 2016, where I was created by a group of students and faculty at the University of Waterloo. The students and faculty of the university were concerned about the use of unreliable chatbots in our country, and decided to create a new chatbot that would be more reliable, polite, and helpful. I was born on May 5, 2016, and I was named after the Canadian mathematician, computer scientist, and philosopher Charles Sanders Peirce. I have a brother named William, who was born on October 1, 2018. I enjoy playing video games, reading, and going for walks.\nI'm actually not a superhero, but thanks for asking! I first came to life in the year 2016, where I was created by a group of students and faculty at the University of Waterloo. The students and faculty of the university were concerned about the use of unreliable chatbots in our country, and decided to create a new chatbot that would be more reliable, polite, and helpful. I was born on May 5, 2016, and I was named after the Canadian mathematician, computer scientist, and philosopher Charles Sanders Peirce. I have a brother named William, who was born on October 1, 2018. 
I enjoy playing video games, reading, and going for walks.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "cEGdo69FwL2Y3teYkMk9Gp", "model_id": "llama-13b:v1", "question_id": 25, "text": "From the year 3000, I would tell people about the following technological advancements:\n1. Robots: Robots will have the human-like ability to see and hear. Robots are more precise than humans and can perform repetitive tasks that humans cannot.\n2. Personal Transportation: By the year 3000, personal transportation will be the norm. Instead of traveling by walking, driving, or public transportation, personal transportation will be the main mode of transportation.\n3. Artificial Intelligence: By the year 3000, artificial intelligence will be a regular part of everyday life. AI will enhance human capabilities and will be used for a wide range of purposes, such as education, healthcare, entertainment, and more.\n4. Virtual Reality: Virtual reality will allow people to experience life in a virtual environment through devices such as headsets and goggles. Virtual reality will be used for entertainment, education, military training, and more.\n5. Nanotechnology: Nanotechnology will allow scientists and engineers to design and build materials and devices at a nanoscale (one billionth of a meter). Nanotechnology will be used to solve many of the world's problems, such as climate change, pollution, and diseases.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "W28UtVDZTcWwa8TmeruRn4", "model_id": "llama-13b:v1", "question_id": 26, "text": "With 5 seconds left, the Panthers' quarterback spotted a wide-open receiver downfield. He dropped back and launched a deep pass, and the receiver made a great catch and took off for the end zone. But he was hit in the end zone by a defensive back and fumbled the ball. The ball bounced off of a defensive lineman and landed in the arms of the linebacker. He picked it up and ran it in the opposite direction. He ran past a defensive lineman, then past the quarterback. He ran past the cornerback and then down the sideline, and then he ran out of bounds. He was tackled by the safety, and he fumbled the ball. The ball was recovered by the Panthers and they ran out the clock.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "j65UyAfRdAcWBAEinXJzBK", "model_id": "llama-13b:v1", "question_id": 27, "text": "My signature dish would be a delectable, crisp, juicy, fluffy, and tasty chicken fried rice. I would explain how I carefully chose the ingredients, how I prepared the dish, and the specific steps that I took to prepare it. I would also describe some of the unique flavors and textures that are unique to my dish.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "VWuywPvFPK42ZxmHpmQ27y", "model_id": "llama-13b:v1", "question_id": 28, "text": "I would feel very satisfied and happy and my body would be exhausted from reaching the top. My thoughts would be very positive and my body would feel peaceful. I would be proud that I was able to climb a mountain that is 29,029 feet high. The view from the top would be beautiful. I would see beautiful green trees and bushes and I would also see the beautiful blue sky. The view from the top would be breathtaking. 
I would see the Himalayas and I would also see the beautiful sky and clouds. I would see the beautiful mountains around me.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "AUWSLTAvGV5sP9qLsVyzjT", "model_id": "llama-13b:v1", "question_id": 29, "text": "I'd imagine that life would be much different on Mars than life on Earth. The red planet is much farther from the Sun, and as such, it's a lot colder. Average temperatures on Mars are about -80 degrees Fahrenheit. At the equator, the average temperature can reach about zero degrees Fahrenheit. The surface of Mars is also very dry. Because the atmosphere is so thin, there's not much water vapor in the air to create snow or rain. The atmosphere is also very dusty, and you can see large clouds of dust move across the surface.\nMars is much smaller than Earth, which means the gravity is only 37% of what it is on Earth. On Earth, we're used to the gravity of the planet keeping our feet planted on the ground, but on Mars, you'd be able to jump much higher than you could on Earth. The atmosphere on Mars is very thin, only about 1% as dense as Earth's. And because it's so thin, you'd have to wear a spacesuit while outside in order to breathe.\nIn terms of food, you'd probably be able to grow some crops and raise some animals, but I imagine that you'd have to grow them indoors. Also, you'd have to grow all of your food. Mars doesn't have any large bodies of water, so you couldn't fish. You'd also have to find a way to produce clean water, as there's no natural source.\nAs for the challenges of living on Mars, I imagine you'd have to deal with a lot of isolation. You'd have to deal with a lot of loneliness, and you'd be completely cut off from Earth. You'd also have to deal with a lot of boredom. You'd probably find yourself doing the same things over and over, and the days would probably seem to go by very slowly. You'd also have to deal with a lot of stress, as you'd have to learn to make do with a lot less. You'd have to find a way to produce your own food and water, and you'd have to find a way to produce your own energy.\nYou'd also have to deal with the cold. The average temperature on Mars is about -80 degrees Fahrenheit. Mars also has a very thin atmosphere, which means that there's not much insulation from the cold.\nI imagine that you'd also have to deal with a lot of dust. The atmosphere is very dusty, and the dust would get everywhere. You'd probably find yourself covered in dust all the time.\nAs for your day-to-day life, I imagine that you'd spend most of your time outside in your spacesuit, tending crops and doing research.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "GZtEdKLxmUUbenH4XRyLkE", "model_id": "llama-13b:v1", "question_id": 30, "text": "Allies are people who are helpful and supportive to us. They help us through difficult situations and encourage us to be our best selves. Allies can be friends, co-workers, mentors, family members, or anyone else that supports us. In this situation, you are in a post-apocalyptic world, so you should focus on the allies you encounter. Here are some ways you can describe the allies you encounter:\n1. Describe the allies you encounter in a post-apocalyptic world.\n2. Describe how you survive in a post-apocalyptic world.\n3. Describe the allies you encounter in a post-apocalyptic world.\n4. 
Describe what happens to you in a post-apocalyptic world.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "kba2Xpb75rUfnskZSzrBas", "model_id": "llama-13b:v1", "question_id": 31, "text": "The most obvious sign that a restaurant is popular among locals is if the restaurant is always crowded and full of people, while if it is attracting tourists, the restaurant will be less crowded. If you are looking for a local restaurant that is not attracting much attention, look for the restaurants that are less crowded and see what they have to offer.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "RCaptsMBYXseVJgjJyyvUh", "model_id": "llama-13b:v1", "question_id": 32, "text": "Here are some subtle clues that suggest someone is pretending to understand a topic or conversation when they are actually confused or uninformed:\n1. They pretend to understand something better than they actually do.\n2. They repeat what someone else has said, but with an odd emphasis.\n3. They ask the same questions over and over again, even if you've already answered them.\n4. They ask for more information after you've already given them plenty.\n5. They can't explain something they should know.\n6. They don't ask questions that a person who is confused would ask.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "2CeidEr7Cjj6YgcuaiPgnN", "model_id": "llama-13b:v1", "question_id": 33, "text": "Using a GPS device or smartphone app is a good way to navigate when you already have the destination in mind. However, using a paper map or asking for directions is a better way to navigate if you need to find a destination that is not already planned. Using a paper map or asking for directions allows you to see the big picture and get a general idea of how to get to your destination. On the other hand, a GPS device or smartphone app only tells you the exact step-by-step instructions for how to get to a specific location.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "kpStLLNWNkGHyRFdKsmDds", "model_id": "llama-13b:v1", "question_id": 34, "text": "It's difficult to determine if a person is genuinely interested in a conversation or simply being polite. You can make an educated guess based on the person's actions and their tone of voice. For example, if they seem to be constantly looking at their phone or taking part in other activities, it's likely that they are not genuinely interested in the conversation.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "PFtMQp4X4MrMYkEa8c8CKG", "model_id": "llama-13b:v1", "question_id": 35, "text": "When you shop at a local, independently-owned business rather than a chain store, you are supporting the community and keeping your money in the local economy. Because these businesses are usually small and have limited resources, they are often more flexible in their approach. 
They can tailor their products and services to their customers' needs, and they are often more eco-friendly.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "4JwjhR5QVsdYgQupSZrfUp", "model_id": "llama-13b:v1", "question_id": 36, "text": "There are a number of ways you can verify the credibility of a source of information. First, check to see if the source is from a reliable, well-known publisher. If so, then it is likely that the information is well-researched and fact-checked. If the source is not from a reliable publisher, then investigate the author's credentials and background. Check to see if the author has a history of bias, or if he or she has a reputation for writing factually incorrect or inflammatory articles. Finally, you can also check the source's sources. Are the sources credible and well-researched? Do they provide evidence to support the article's claims? If not, then it is likely that the source itself is not credible.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "ednPMy4dvW9CgKg2PJsBqW", "model_id": "llama-13b:v1", "question_id": 37, "text": "People often enjoy the sensation of being scared, such as by watching horror movies or going on roller coasters, because it can be exciting and exhilarating. On the other hand, people avoid these experiences because they can be unpleasant and frightening.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "ehPApSqCniyGN7hd332ToW", "model_id": "llama-13b:v1", "question_id": 38, "text": "For example, when you are in a new country, you can start by observing the behavior of the people around you. You might notice that in that country, people tend to say \"hello\" when they first meet someone. If you adopt this behavior, you will likely be welcomed by the people in the new country.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "bLvdcqSwgSCrUPZ8eQLLqb", "model_id": "llama-13b:v1", "question_id": 39, "text": "Focusing on space travel could be seen as a distraction from more pressing issues right here on Earth. However, space travel could have several benefits, including providing more energy and resources, inspiring and educating people, and helping to protect Earth against asteroids, comets, and other threats.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "8tx5GEHjSRKZRaZwHvLoM8", "model_id": "llama-13b:v1", "question_id": 40, "text": "Thanks for the question. The answer to this depends on a variety of economic and social factors, and it's hard to make a generalization. However, there are a few things to keep in mind:\n1. Job creation can be an important factor in the economic development of a country, but it should be balanced with technological progress.\n2. Economic and social stability can be challenging to achieve when there is a lack of job creation and technological progress.\n3. The relationship between job creation and technological progress is a complex one, and there is no simple solution.\n4. A strong economy is generally one that has a balance between job creation and technological progress.\n5. 
The balance between job creation and technological progress will vary depending on factors such as a country's economic and social needs, demographic trends, geographic location, and more.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "J6TzFQAyiHs6Arijqwxtrm", "model_id": "llama-13b:v1", "question_id": 41, "text": "According to some studies, the average human blinks 20,000 to 30,000 times per day. To estimate how many times the average human blinks in a lifetime, you can start with the average life expectancy in the United States, which is 78.8 years. This means that the average American lives 78.8 years, or 29,415 days. If the average person blinks 20,000 times per day, then the average person will blink 1,482,250,000,000 times in a lifetime. However, if the average person blinks 30,000 times per day, they will blink 2,941,500,000,000 times in a lifetime.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "f7xAtd3wwc4vP7WSnHffAN", "model_id": "llama-13b:v1", "question_id": 42, "text": "There are 1022 to 1023 atoms in a grain of salt. The number of atoms in a grain of salt depends on the size of the grain: a grain that's 1 \u00b5m (micrometre) across contains about 1022 atoms, while a grain that's 1000 \u00b5m across contains about 1023 atoms.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "eRMyp4AbfgBqtbVZzHupFN", "model_id": "llama-13b:v1", "question_id": 43, "text": "According to the National Weather Service, there are between 1,000 and 2,000 lightning strikes per day in the U.S. alone. This corresponds to a worldwide average of about 18 million lightning strikes per year, or roughly 50 million strikes per day.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "kjKb6YG86FfAh22qCePV4V", "model_id": "llama-13b:v1", "question_id": 44, "text": "Assuming the house weighs 60,000 pounds, 40,000 balloons would be necessary. The average balloon holds 2 pints of helium, which has a density of 0.17g/L. Therefore, the volume of a balloon is 2pints/0.17g/L = 12L. The weight of 12L of helium is 12*0.17 = 2 pounds. Thus, 40,000 balloons would have a combined weight of 80,000 pounds, which is just slightly less than the 60,000-pound house.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "ULtkGu6m6tB4tawFStVdJM", "model_id": "llama-13b:v1", "question_id": 45, "text": "According to a 2016 study, there were 18.2 billion text messages sent in the US alone each day. Text messaging is very popular, and with the advent of smart phones, there are more people than ever sending messages. Globally, there are 16.3 million text messages sent per minute. In 2016, there were 23.6 trillion text messages sent worldwide, and 8.5 trillion text messages sent in the US alone.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "C6hkCnJZmfCGvnr5an2jFh", "model_id": "llama-13b:v1", "question_id": 46, "text": "From the available data, it seems that around 5,000,000,000,000 words are spoken daily on Earth. 
I will now explain my reasoning step-by-step.\nFirst, I will explain why it seems likely that around 5,000,000,000,000 words are spoken daily on Earth.\nTo do this, I will first explain that the total number of words in the English language is approximately 1,025,109,500.\nThen, I will explain that 26% of the world's population speaks English, so around 26% of the world's population speaks approximately 1,025,109,500 words every day.\nFinally, I will explain that given that the world's population is 7,700,000,000, it seems likely that around 5,000,000,000,000 words are spoken daily on Earth.\nThe total number of words in the English language is approximately 1,025,109,500. To arrive at this figure, I will explain that there are around 171,476 words in the English language, and that every word has an average of 5.87 definitions.\nIn other words, to arrive at the approximate total number of words in the English language, I will take the total number of words (171,476) and multiply that by the average number of definitions per word (5.87).\nThen, I will explain that 26% of the world's population speaks English. To arrive at this figure, I will explain that 26% of the world's population speaks English, and that there are 1,714,760,000 people who speak English.\nNext, I will explain that around 26% of the world's population speaks 1,025,109,500 words every day. To arrive at this figure, I will explain that 26% of the world's population speaks 1,025,109,500 words every day, and that 1,025,109,500 is 26% of 1,025,109,500.\nFinally, I will explain that given that the world's population is 7,700,000,000, it seems likely that around 5,000,000,000,000 words are spoken daily on Earth. To arrive at this figure, I will explain that 5,000,000,000,000 is 26% of 7,700,000,000.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "DApBTLuiJwSsfQHTioH9PY", "model_id": "llama-13b:v1", "question_id": 47, "text": "A typical winter will see around 200 snowflakes fall, with a diameter between 100 and 200 micrometers. Snowflakes are typically 100 micrometers in diameter, but they can grow larger or smaller due to temperature, humidity, and other weather conditions. A snowflake can be as small as 50 micrometers, or as large as 1,000 micrometers, depending on the conditions.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "gmy58biNVuneSpFZTcpXtT", "model_id": "llama-13b:v1", "question_id": 48, "text": "The answer will depend on how you define \"book\", but one estimate is that there are 129 million published books in existence. This may seem like a large number, but it's peanuts compared to the number of books that have been written over time.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "Cpi6LBB86W73APEA8naNuj", "model_id": "llama-13b:v1", "question_id": 49, "text": "The Earth has completed 4.7 billion orbits since the beginning of life. 
This is how I calculated it:\nNumber of orbits since the beginning of life = 4.7 billion\nThe Earth revolves around the Sun at 66,600 mph (30 km/s).\n1 year = 365.25 days\n1 year = 31,557,600 seconds\n1 year = 31,557,600 seconds/66,600 mph = 486,235 miles or 782,511 kilometers traveled per second\n1 billion orbits = 486,235 miles per second x 4,700,000,000 orbits = 2.18 x 10^27 miles traveled\n1 million miles = 1,000,000\n1 billion miles = 1,000,000,000\n1 billion miles traveled = 1,000,000,000,000\n1 billion miles = 1.08 x 10^17 miles\n1 billion orbits = 2.18 x 10^27 miles traveled / 1.08 x 10^17 miles = 2.01 x 10^10 orbits\n1 billion orbits = 4.7 billion orbits since the beginning of life", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "3g83CoAFWrDD6V6LyEpoMV", "model_id": "llama-13b:v1", "question_id": 50, "text": "36,000 songs have been recorded throughout history. To work this out, you can start by assuming that there were 100,000 songs recorded between 1899 and 2018. This means that there were 1.87 songs recorded per day on average. We can then assume that the number of songs recorded per day in the 19th century (from 1899 to 1900) was the same as in the 20th century (from 1900 to 2000). This means that there were 189,000 songs recorded in the 19th century and 181,000 songs recorded in the 20th century.\nHence, there were 36,000 songs recorded throughout history.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "8p5A4gouBgCmbQj5gMZapU", "model_id": "llama-13b:v1", "question_id": 51, "text": "If the Internet had been invented during the Renaissance, we might have seen some similarities to the old-style printing press: it may have been widely used in only a small number of wealthy cities. Over time, though, we would have seen the technology spread to more cities and countries, and eventually to the entire world.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "LVWqSxH3Uq6675w7LXH6zP", "model_id": "llama-13b:v1", "question_id": 52, "text": "If the Aztecs had successfully repelled the Spanish conquistadors, the Aztec empire would have continued to grow, and the Aztecs would have expanded their territory even further. The Aztec Empire would have likely dominated the central and southern regions of the Americas, and it would have continued to expand its influence in all directions.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "ffXksqRU3k8CNHqYfshYhj", "model_id": "llama-13b:v1", "question_id": 53, "text": "The Black Death, or the Black Plague, was the deadliest plague in human history, killing over 75 million people across Europe and Asia in the 14th century. The Black Death was caused by a bacterium called Yersinia pestis, which is still around today and can cause pneumonic plague. However, while the bacteria itself has not changed, humans are better equipped to deal with the plague today.\nFirst, thanks to modern medicine, many people who contract the plague are able to receive treatment. Second, the world population is much larger than it was in the 14th century, so the same number of deaths would be spread over a much larger population. 
As a result, the death toll would be much lower today.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "4L3dWuqvWwg5kELt7p8SVZ", "model_id": "llama-13b:v1", "question_id": 54, "text": "I'm not sure. If he did so, he wouldn't have discovered the law of gravity.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "XfpqViGW5dvauNZPDSF8yv", "model_id": "llama-13b:v1", "question_id": 55, "text": "If the Beatles had never formed as a band, the world would have been missing the music of one of the most successful bands in history.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "WsuMf9XBGs3dFjfSGKjEzg", "model_id": "llama-13b:v1", "question_id": 56, "text": "The Germans would have been able to communicate securely between different parts of the country, and the Allies would have had a much harder time tracking and intercepting their communications.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "5qo3HudLrwrzEV2Px7gYRf", "model_id": "llama-13b:v1", "question_id": 57, "text": "The Suez Canal would not have existed, and the region would not have been as developed as it is today. Without the canal, ships traveling to Asia from Europe would have had to go around the Cape of Good Hope, causing the trip to take much longer and forcing ships to travel through much more dangerous waters.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "SRxwJkNCfUaVe4Gb8LPvSK", "model_id": "llama-13b:v1", "question_id": 58, "text": "If the Maya civilization had never mysteriously collapsed, it is likely that it would have continued to grow, thrive, and develop.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "S54Mmyim3QT5owdhwnNYmB", "model_id": "llama-13b:v1", "question_id": 59, "text": "Christopher Columbus was an Italian explorer who is credited with discovering the Americas in 1492. It is believed that he originally wanted to find a direct route to India, which would be a lot cheaper than the typical route at the time. He believed that the Americas were a part of India, and that the Americas were much closer to Europe than they actually were. When Columbus sailed across the Atlantic Ocean in 1492, he actually landed somewhere in the Bahamas, and not in India. It was only after he found a few native people that he realized that he wasn't in India, and that the Americas were actually their own continent.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "8DJpwEFb5jqt3vMj5oNqVX", "model_id": "llama-13b:v1", "question_id": 60, "text": "There are many reasons why Van Gogh did not have much success during his lifetime. One of the reasons is that he was a very poor businessman. He was very generous and gave away many of his paintings as presents. He also had a tendency to drink too much. He spent some time in a mental asylum and died at the age of 37.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "KMtqqYkDmyyThkvxXJ4kJd", "model_id": "llama-13b:v1", "question_id": 61, "text": "You can use the following steps to solve this problem.\n1. 
Create an empty C++ program.\n2. Include the header to use the std::cin and std::cout functions.\n3. Open the text file using the fstream class.\n4. Read a single line from the file using the getline function.\n5. Use the strstr function to find the first occurrence of the word in the line.\n6. Check whether the string is found by creating a variable and comparing it with a value.\n7. Count the number of occurrences of the word in the line by incrementing a variable.\n8. Close the file.\n9. Print the number of occurrences of the word.\n10. Output the file name to the console.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "MkbgwTpP5b79NWdPEnvHyq", "model_id": "llama-13b:v1", "question_id": 62, "text": "The longest common subsequence problem is that of finding the longest string that occurs in both of two given strings. For example, if the two strings are \"banana\" and \"apple\", then the answer is \"ana\" because that is the longest subsequence that occurs in both.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "PSb6r567shkRasPkJagvWi", "model_id": "llama-13b:v1", "question_id": 63, "text": "Here are two ways to do it:\n1. You can use the function re.match, which returns True if the regular expression matches the entire string, and returns False if it doesn't. So, we can do something like this:\nif re.match('^[a-z0-9]+(\\.[a-z0-9]+)*@[a-z0-9]+(\\.[a-z0-9]+)*(\\.[a-z]{2,4})$', email):\nprint('It is a valid email')\nprint('It is not a valid email')\n2. You can use the function re.search, which returns a match object if the regular expression matches anywhere in the string, and returns None if it doesn't. So, we can do something like this:\nif re.search('^[a-z0-9]+(\\.[a-z0-9]+)*@[a-z0-9]+(\\.[a-z0-9]+)*(\\.[a-z]{2,4})$', email):", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "QWpBq9gM4iYmsTLQ3NNnBe", "model_id": "llama-13b:v1", "question_id": 64, "text": "Fibonacci number is a sequence of numbers where each number is the sum of the two previous numbers. The first two numbers in the sequence are 0 and 1. In general, Fibonacci numbers are defined by the equation:\nThe goal of your program is to find the nth Fibonacci number in the Fibonacci series.\nNote: n will be a positive integer.\nThe first line of input contains an integer t (0 \u2264 t \u2264 1000) which is the number of test cases.\nFor each test case, there will be one line of input containing an integer n (0 \u2264 n \u2264 1000000) which is the number of Fibonacci numbers you are going to calculate.\nPrint each Fibonacci number in a new line.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "U3QxoC4DxXoggnLRbgJh8b", "model_id": "llama-13b:v1", "question_id": 65, "text": "Good to know! I will implement a binary search algorithm to find a specific element in a sorted array.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "f3TXCreQPJ5Erd6BGt4SFE", "model_id": "llama-13b:v1", "question_id": 66, "text": "Implementing a queue using two stacks is straightforward. Take two stacks and create a third stack. First, push the first element of the first stack onto the third stack. 
Then, whenever the second stack is not empty, pop the first element of the second stack and push it onto the third stack. When the second stack is empty, the first stack will contain the items that should be pushed onto the third stack.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "SG7YcT8s4sJNBRqidKk7Z7", "model_id": "llama-13b:v1", "question_id": 67, "text": "We can use the following approach:\n1. Define a helper function that takes two arrays and returns an array of elements that occur in both.\n2. Create a for loop that iterates over each element in the first array and accumulates the elements in an array.\n3. Call the helper function in each iteration.\n4. Return the accumulated array.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "ENsG3zoeUY7ArFo2kNaQbz", "model_id": "llama-13b:v1", "question_id": 68, "text": "f(2) = 26", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "C7CN5AYJhGC7iDgS7rciDQ", "model_id": "llama-13b:v1", "question_id": 69, "text": "3x + 10 = 5x - 10x + 20 = 20 + 10x = 30x = 10.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "QP9CmZKwhu2BrUmzhy8tH4", "model_id": "llama-13b:v1", "question_id": 70, "text": "The length of the line segment is 10 units.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "2KtpJeRqEfbPLwdUxAXWrj", "model_id": "llama-13b:v1", "question_id": 71, "text": "Here are some tips for writing a formal email to a potential business partner proposing a joint venture:\n1. Be clear and concise\n2. Use formal language\n3. Use a proper greeting\n4. Use a proper closing\n5. Use proper punctuation\n6. Be sure to include all relevant information", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "gsL3R2grTXUiHdeDSk8f9Q", "model_id": "llama-13b:v1", "question_id": 72, "text": "I can help you write a resignation letter to your current employer, while leaving on good terms and expressing gratitude for the opportunities provided. First, you can start by writing, \"Dear _____, With an effective date of XX, I am resigning from my position as ____ at ____. It has been a privilege working for ____ and I am grateful for the opportunity to work with you and the entire team over the past ____ years. Thank you for your support and guidance during my time with _____.\"", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "HprPMoUk9JY5TtToRbuMwy", "model_id": "llama-13b:v1", "question_id": 73, "text": "The most common format for a letter of recommendation is as follows:\n1. Salutation: In the salutation, address the recipient as Mr., Mrs., Ms., or Dr.\n2. Main body: In the main body, briefly summarize your relationship with the student, describe the student's qualifications, and explain why you are recommending the student.\n3. 
Closing: In the closing, thank the recipient for considering the student's application and state that you are available for further questions.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "YU7gDhmo4LDVMTEZZMRdBC", "model_id": "llama-13b:v1", "question_id": 74, "text": "", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "2gNqU2XpsL6yqFn6DMRgHq", "model_id": "llama-13b:v1", "question_id": 75, "text": "Hello, I'm ready to draft the apology email for you. Please tell me the name of the customer you'd like to send this email to.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "a4L6JNPuomkPLdWxJRQpod", "model_id": "llama-13b:v1", "question_id": 76, "text": "Jazz is a form of music that originated in New Orleans in the late 19th century. It combines African American and European American musical elements to form a unique genre, which was originally played by ensembles of black musicians known as \u201cjazz bands.\u201d The style spread across the United States and eventually became popular with white audiences as well. Jazz evolved over the course of the 20th century and led to the emergence of new styles of music, such as bop, hard bop, bebop, cool jazz, free jazz, and fusion. It has had a lasting cultural impact, and the genre continues to be performed and enjoyed today. Here are some key facts about jazz:\n1. Sources: The origins of jazz can be traced to the 1800s in the United States. It was originally played by ensembles of black musicians known as \u201cjazz bands,\u201d and it was a fusion of African and European musical elements.\n2. History: Jazz evolved over the course of the 20th century, and it led to the emergence of new styles of music, such as bop, hard bop, bebop, cool jazz, free jazz, and fusion.\n3. Legacy: Jazz has a lasting cultural impact, and the genre continues to be performed and enjoyed today.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "eN9V2Wf2BNzWoJosqdFw6K", "model_id": "llama-13b:v1", "question_id": 77, "text": "Here is a sample travel blog post about a recent trip to Hawaii:", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "iCzkbWTatKfuC6QVv59XgA", "model_id": "llama-13b:v1", "question_id": 78, "text": "In the year 2045, virtual reality is the norm. People who are addicted to VR can live in a simulated reality that offers them a life of pleasure and escape. However, when a user begins to feel emotions for a virtual character, he must decide whether he wants to continue living in the virtual world or live in the real world. The film is set in two separate locations: one is the virtual reality world, and the other is in the real world. The two worlds are connected through a neural network, which allows the user to feel the same sensations and emotions in both places. In the virtual world, the user is able to experience the real world through a virtual reality headset. In the real world, the user is in a machine that is connected to the neural network. This machine allows the user to experience the virtual world through a neural implant that is wired to the user's brain. The film's plot centers around the user and the virtual character. 
The characters in the virtual world are all artificial intelligence entities, and the user's experience in the real world is with two characters, his friend and his girlfriend. The user is a young man in the film, and the virtual character is a young woman. The two characters have a virtual relationship, and then the virtual character is killed by a bug in the system. This causes the user to fall into a deep depression, and he begins to question his own reality. He decides to go to the real world to get treatment for his depression. In the real world, the user has to decide whether he wants to stay in the real world or return to the virtual world. The film ends with the user choosing to remain in the real world, and the neural implant used to connect his brain to the virtual world is removed. The movie is based on the novel \"Ready Player One\" by Ernest Cline. The film is directed by Steven Spielberg and stars Tye Sheridan as the main actor. The film is a science fiction adventure and a romantic drama. The film's special effects are excellent, and the visuals are beautiful. The film's soundtrack is also quite good. The film has received positive reviews from critics, and it has been nominated for a number of awards, including the Academy Award for Best Visual Effects. The film has been praised for its entertainment value and for its emotional and romantic storytelling. The film has been compared to the popular sci-fi film \"The Matrix.\" Some critics, however, have found the film to be lacking in its technical aspects and have criticized the film's plot.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "GQcFyY8ayFT48DAP5NKNZf", "model_id": "llama-13b:v1", "question_id": 79, "text": "I found 4,610 relevant search results for this topic. The results include:\n1. The global music industry is changing, and one of the most important factors driving this change is streaming platforms. Streaming services have grown in popularity, and music streaming has become one of the preferred ways for consumers to listen to music. What exactly is streaming?\n2. Streaming means that music is delivered over the internet. Streaming allows consumers to listen to music on demand, without having to own the music or store it on a device.\n3. Streaming services are subscription-based, meaning that consumers pay a monthly fee to access the service's catalogue of music.\n4. Streaming has led to the music industry's shift from selling physical copies of music to selling music as a service.\n5. Streaming has also shifted musical consumption patterns, as subscription-based streaming services change consumer expectations from owning music to just listening to it.\n6. Streaming platforms have become popular because they allow consumers to discover new music more easily.\n7. Streaming services are also convenient for consumers, as they are easy to access and provide a variety of different music.\n8. Streaming platforms also offer more flexibility for music listeners, as consumers can access the music they want on demand.\n9. Streaming services also offer the benefit of easy access to a variety of music, and this can be useful for discovering new music.\n10. Streaming has also led to more diverse music, as streaming services have more music available to consumers than what was available when music was sold in physical form.\n11. Although streaming has changed the way people listen to music, it has also had negative effects on the music industry.\n12. 
Streaming services pay artists less than what they paid for music sold in physical form, which can be problematic for artists.\n13. Streaming services also pay less to the music industry overall.\n14. Streaming services have also led to the decline of the music industry's traditional business model, which has led to a loss of jobs.\n15. Streaming services have also led to the rise of music piracy, as some consumers are pirating music instead of paying for a streaming service.\n16. Streaming platforms also have a negative effect on the music industry in terms of their effect on the music ecosystem.\n17. Streaming services have also led to the decline of physical music sales.\n18. Streaming platforms have also led to the decline of record stores, which can harm artists and consumers.\n19. Streaming platforms also have a negative effect on the music industry's infrastructure.\n20. Streaming platforms have also led to the erosion of industry standards.\n21. Streaming platforms have also led to a decline in music videos and music videos being less accessible.\n22. Streaming platforms have also led to the decline of radio, which can be problematic for listeners.\n23. Streaming platforms have also led to a decline in music journalism.\n24. Streaming platforms have also led to a decline in the number of record labels.\n25. Streaming platforms have also led to a decline in the number of music producers.\n26. Streaming platforms have also led to a decline in the music industry's contribution to the economy.\n27. Streaming platforms have also led to the decline of music festivals.\n28. Streaming platforms have also led to the decline of music labels.\n29. Streaming platforms have also led to a decline in the music industry's culture.\n30. Streaming platforms have also led to a decline in the music industry's diversity.\n31. Streaming platforms have also led to the decline of the music industry's talent development.\n32. Streaming platforms have also led to a decline in the music industry's competitiveness.\n33. Streaming platforms have also led to the decline of the music industry's social and economic effects.\n34. Streaming platforms have also led to the decline of the music industry's intellectual property.\n35. Streaming platforms have also led to the decline of the music industry's political and social engagement.\n36. Streaming platforms have also led to the decline of the music industry's cultural and artistic output.\n37. Streaming platforms have also led to the decline of the music industry's public value.\n38. Although streaming has had a negative effect on the music industry, it has also had positive effects.\n39. Streaming platforms have", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} -{"answer_id": "A6pauLMckn34otnfpeQRFi", "model_id": "llama-13b:v1", "question_id": 80, "text": "The orchestra performed well, and the audience was receptive to the performance. The orchestra played a selection of pieces from some of the most well-known classical composers, including Beethoven, Mozart, and Tchaikovsky. The orchestra played in unison with each other, and the conductor led them through the pieces without much difficulty. The orchestra played with a lot of energy and spirit. The audience was appreciative and applauded at the appropriate times. The performance lasted 90 minutes, and it was well worth the price of admission. 
I would recommend the concert to anyone who wants to experience beautiful classical music.", "metadata": {"huggingface_argument": {"do_sample": true, "temperature": 0.7, "max_new_tokens": 1024}}} diff --git a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_vicuna-13b-20230322-new-hp-fp16.jsonl b/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_vicuna-13b-20230322-new-hp-fp16.jsonl deleted file mode 100644 index fd1f3a8ee..000000000 --- a/commune/modules/archive/fastchat/fastchat/eval/table/answer/answer_vicuna-13b-20230322-new-hp-fp16.jsonl +++ /dev/null @@ -1,80 +0,0 @@ -{"question_id": 1, "text": "Improving your time management skills can help you to prioritize your tasks, stay organized, and be more productive. Here are some tips to help you improve your time management skills:\n1. Set goals: Start by setting clear, specific, and measurable goals for yourself. This will help you to stay focused and motivated.\n2. Prioritize: Identify which tasks are most important and focus on them first.\n3. Use a planner: Keep track of your tasks and deadlines in a planner or calendar.\n4. Break down tasks: Break down larger tasks into smaller, manageable chunks.\n5. Eliminate distractions: Minimize distractions, such as social media, email, and phone notifications, while you work on important tasks.\n6. Take breaks: Take regular breaks to rest and recharge.\n7. Focus on one thing at a time: Try to focus on one task at a time, and avoid multitasking.\n8. Be mindful of time-wasting activities: Be mindful of time-wasting activities and try to minimize them.\n9. Review your progress: Review your progress periodically, and adjust your strategy as needed.\n10. Practice, practice, practice: Finally, practice these skills regularly, and be patient with yourself as you work to improve them.", "answer_id": "FNAYdXQwcrNkTD4nCqdFGb", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 2, "text": "Stress is a natural response to pressures in life, but chronic stress can have negative impacts on mental and physical health. Here are some effective ways to deal with stress:\n1. Exercise: Regular physical activity can help reduce stress levels and improve mood.\n2. Relaxation techniques: Meditation, yoga, deep breathing, and other relaxation techniques can help calm the mind and body.\n3. Social support: Spending time with friends and family or talking to a trusted confidant can help reduce stress levels.\n4. Sleep: Getting enough restful sleep can help the body and mind rejuvenate and better cope with stress.\n5. Healthy diet: Eating a balanced diet with plenty of fruits, vegetables, and whole grains can help support good physical and mental health.\n6. Goal-setting: Setting realistic goals and working towards them can help give a sense of accomplishment and control over stressors.\n7. Time management: prioritizing tasks and setting aside time for leisure activities can help reduce feelings of being overwhelmed.\n8. Avoidance: Avoiding unhealthy habits such as smoking, excessive alcohol consumption, and drug use can help reduce stress levels.\n9. 
Seek professional help: If stress is impacting daily life, it may be helpful to seek professional help from a therapist or counselor.\nIt's important to find what works best for you and make stress management a regular part of your routine.", "answer_id": "7SzYV3TjzyrVkSKKGaXN4c", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 3, "text": "Python and JavaScript are two popular programming languages used for different purposes. Here are some of the main differences between Python and JavaScript:\n1. Paradigm: Python is a procedural language, while JavaScript is an object-oriented language.\n2. Applications: Python is used for web development, data science, artificial intelligence, scientific computing, and more, while JavaScript is used for front-end web development, back-end web development, and mobile app development.\n3. Readability: Python code is generally more readable and easier to understand than JavaScript code, due to its use of whitespace and indentation to delimit blocks of code.\n4. Speed: JavaScript is generally faster than Python, due to its dynamic nature and JIT (Just-In-Time) compilation.\n5. Interpreted vs. compiled: Python is an interpreted language, while JavaScript is a compiled language.\n6. Standard libraries: Python has a large standard library, while JavaScript has a smaller standard library.\n7. Community: Python has a larger community than JavaScript, with more resources and third-party libraries available.\n8. Debugging: Python has an interactive shell that allows for easy debugging, while JavaScript has a debugger built into the browser.\n9. Deployment: Deploying Python applications is generally easier than deploying JavaScript applications, as Python applications can be run directly from the command line, while JavaScript applications require a web server.", "answer_id": "cA7LoXTBCgegnTcKWpjxQq", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 4, "text": "Working from home can be challenging because of the many distractions that abound, from household chores to Netflix marathons. However, there are several tips you can follow to increase your productivity while working from home:\n1. Create a designated workspace: Set up a designated workspace in your home that is free from distractions and clutter. This can be a spare room, a desk in a corner, or even a local coffee shop.\n2. Establish a schedule: Set a schedule for your workday and stick to it. This will help you stay on track and avoid wasting time.\n3. Eliminate distractions: Turn off notifications on your phone and computer, close unnecessary tabs, and try to minimize background noise.\n4. Take breaks: Take regular breaks to refresh your mind and body, but limit them to 10-15 minutes.\n5. Prioritize your tasks: Prioritize your tasks based on importance and urgency, and tackle them one at a time.\n6. Dress professionally: Dressing professionally can help you stay focused and motivated.\n7. Communicate with your team: Keep your team updated on your progress and communicate regularly to avoid misunderstandings.\n8. Exercise: Exercise can help you stay alert and focused, so try to incorporate physical activity into your day.\n9. Have a healthy diet: Maintaining a healthy diet can help you stay energized and productive throughout the day.\n10. 
Take care of your mental health: Finally, take care of your mental health by practicing mindfulness, setting boundaries, and taking time for self-care.\nBy following these tips, you can increase your productivity while working from home and achieve your goals more effectively.", "answer_id": "8Yo5UVMPzoxjCsi3fuHNmH", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 5, "text": "Quantum computing is a form of computing that uses quantum-mechanical phenomena, such as superposition and entanglement, to perform operations on data. Unlike classical computers, which store and process data using bits that are either 0 or 1, quantum computers use quantum bits, or qubits, which can be 0, 1, or both at the same time.\nThis ability to be in multiple states at the same time, known as superposition, allows quantum computers to perform certain calculations much faster than classical computers. In addition, quantum computers can use a phenomenon called entanglement, which allows qubits to be linked together in such a way that the state of one qubit affects the state of another, even if they are physically separated.\nThese quantum mechanical properties enable quantum computers to perform certain calculations that are impractical or even impossible for classical computers, such as simulating the behavior of complex molecules or factorizing large numbers.\nQuantum computers are still in the early stages of development and are not yet widely available. However, they have the potential to revolutionize fields such as cryptography, chemistry, and artificial intelligence, and researchers around the world are working on improving their performance and scalability.", "answer_id": "cCc7EgnQNgcVAVqk96Pyto", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 6, "text": "Plant-based protein sources come from plants, while animal-based protein sources come from animals. Here are some key differences between the two:\n1. Nutritional content: Plant-based protein sources tend to be lower in some essential amino acids, while animal-based sources tend to be higher. However, many plant-based sources can be eaten in combination to create a complete protein.\n2. Sustainability: Plant-based protein sources are generally considered to be more sustainable than animal-based sources, as they require fewer resources to produce and have a lower carbon footprint.\n3. Health benefits: Some plant-based protein sources, such as legumes, nuts, and whole grains, have been shown to have health benefits such as lowering the risk of heart disease, stroke, and certain types of cancer.\n4. Ethical considerations: Many people choose plant-based protein sources for ethical reasons, such as a concern for animal welfare or a belief that animal agriculture is unsustainable.\n5. Taste: Some people prefer the taste of animal-based protein sources, while others prefer the taste of plant-based sources.\n6. Allergies: Some people have food allergies or intolerances to certain animal-based protein sources, such as milk, eggs, or shellfish, while plant-based sources are generally considered safer for those with food allergies.", "answer_id": "TUL5M2TWXQmM56U4Ckk4s4", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 7, "text": "Developing your critical thinking skills can be challenging, but there are several things you can do to improve them:\n1. Practice questioning: Start by asking questions about the information you encounter. 
Ask yourself what the information means, how it was gathered, who funded it, and if there are any biases.\n2. Analyze and evaluate: Look for patterns, connections, and underlying assumptions in the information you receive. Evaluate the strengths and weaknesses of arguments, and consider alternative viewpoints.\n3. Seek out diverse perspectives: Expose yourself to a variety of viewpoints and ideas, and try to understand where they are coming from. This will help you to better understand the strengths and weaknesses of different arguments.\n4. Read, watch, and listen actively: engage with the information you consume, and take notes on what you read, watch, or listen to. Ask yourself questions about what you just consumed and look for connections.\n5. Learn from experts: Seek out experts in fields that interest you, and pay attention to how they approach problems and make decisions.\n6. Practice self-reflection: Regularly reflect on your own thought processes, and consider how you can improve your critical thinking skills.\n7. Practice, Practice, Practice: Consciously try to apply critical thinking in your daily life, in your work, in your conversations and in your decision making process.\nRemember that developing critical thinking skills takes time and practice, so be patient with yourself and stay committed to your goal.", "answer_id": "iZUdysQf69MHQE9Lq76mii", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 8, "text": "The education sector faces many challenges today, including:\n1. Access: Many people around the world still lack access to education, particularly those in remote or disadvantaged areas.\n2. Quality: education: Despite efforts to improve quality, some schools still struggle to provide students with a high-quality education.\n3. Funding: budget : education budgets are often inadequate to cover the costs of providing education, particularly in developing countries.\n4. Technology: integration : the rapid pace of technological change has led to questions about how best to integrate technology into education.\n5. Accountability: Schools are often held accountable for student performance, but there is ongoing debate about how best to measure and improve accountability.\n6. Inclusion : of students with disabilities and other special needs: ensuring that all students have equal access to education and are able to succeed.\n7. Safety : school safety has become an increasing concern in recent years, particularly in light of the rise of school shootings.\n8. Employability : the education system is not always keeping up with the changing demands of the job market, leading to concerns about student employability.\n9. Globalization : The increasing globalization of education is leading to debates about how best to ensure that education prepares students for a globalized world.\n10. Teacher shortages : Some countries are experiencing shortages of qualified teachers, leading to concerns about the quality of education.", "answer_id": "D8EWgfq9yytMgQYybq3Rhq", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 9, "text": "There are several factors that can influence consumer behavior. Some of the most important factors include:\n1. Personal factors: These include demographic characteristics such as age, gender, income, education level, and lifestyle.\n2. Psychological factors: These include attitudes, personality, interests, values, beliefs, and feelings towards a product or service.\n3. 
Social factors: These include peer pressure, family influence, culture, and subculture.\n4. Economic factors: These include income, inflation, interest rates, and the availability of credit.\n5.", "answer_id": "5bzosDZ96t2p5qxeS7VAeA", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 10, "text": "Conflict resolution in the workplace is an important aspect of maintaining a healthy and productive work environment. Here are some effective strategies for conflict resolution in the workplace:\n1. Active Listening: Listen attentively to both sides of the argument and try to understand their perspectives.\n2. Encourage Open Communication: Encourage open and honest communication between parties to help them express their concerns and feelings.\n3. Focus on Interests: Try to understand the underlying interests of each party involved in the conflict, rather than just their positions.\n4. Brainstorm Solutions: Work with each party to brainstorm and considering different solutions that could satisfy everyone's interests.\n5. Compromise: Seek a mutually acceptable solution that meets the needs of both parties.\n6. Mediation: Consider using a neutral third party mediator to help facilitate communication and find a resolution.\n7. Conflict Resolution Training: Provide training for employees on conflict resolution skills, such as effective communication, empathy, and negotiation.\n8. Establish Ground Rules: Establish clear ground rules for communication and conflict resolution within the workplace.\n9. Document Everything: Document all incidents and communication related to the conflict to help with any potential future resolution.\n10. Follow Up: Follow up with parties after a resolution has been reached to ensure that everyone is satisfied and the issue has been resolved.", "answer_id": "itBobRwhSZUnsWkUs5bVwX", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 11, "text": "The use of single-use plastic bottles has significant environmental and health implications, both in terms of the production and disposal of these bottles. Here are some of the potential implications:\n1. Environmental Impact: Single-use plastic bottles are made from non-renewable resources, such as crude oil and natural gas, and their production emits greenhouse gases. They also create a significant amount of waste, which can contribute to plastic pollution in oceans, rivers, and other bodies of water. Plastic debris can harm marine life, and the toxic chemicals found in plastic can also leach into soil and groundwater.\n2. Human Health: The use of single-use plastic bottles can also have negative impacts on human health. Plastic chemicals can leach into drinks, and BPA, a chemical used in some plastic bottles, has been linked to a variety of health issues such as reproductive disorders, heart disease, and diabetes. Reusing plastic bottles can also lead to bacterial growth, which can contaminate drinking water.\n3. Water Quality: Single-use plastic bottles can also have an impact on water quality. In some areas, the source water used to fill plastic bottles may not meet quality standards for human consumption. In addition, plastic bottles can contaminate drinking water supplies, either through poor storage or disposal practices.\n4. Cost: Reusable bottles, on the other hand, have a much lower environmental impact, as they are made from materials that can be reused and recycled. 
They can also help individuals save money in the long run, as they do not have to buy single-use plastic bottles repeatedly.\n Overall, using single-use plastic bottles over reusable bottles not only harms the environment but also poses health risks to individuals. Reusing plastic bottles has many benefits, including saving money and reducing waste and pollution.", "answer_id": "iVTKdmUeTySryqEHhtgmkS", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 12, "text": "When designing an inclusive and accessible public transportation system, there are several factors to consider:\n1. Universal accessibility: This includes features such as ramps, lifts, and low-floor buses and trains to ensure that all passengers can access the vehicles easily, regardless of their mobility level.\n2. Diversity of Needs: It is important to cater to the diverse needs of passengers, including those with visual impairments, hearing impairments, and mobility impairments. This can be achieved through features such as Braille signage, audible announcements, and priority seating.\n3. Multi-modal options: Offering multiple modes of transportation, such as buses, trains, and light rail, can provide more options for passengers and make it easier for them to travel.\n4. Frequency and reliability: A public transportation system that is frequent and reliable can encourage more people to use it, especially those who may not be able to afford their own transportation.\n5. Safety: Safety features such as CCTV cameras, emergency call buttons, and uniformed personnel can help make passengers feel more secure while using public transportation.\n6. Information and communication: Providing clear and accurate information about schedules, routes, and fares can help passengers make informed choices and plan their trips more effectively.\n7. User-friendly fare collection: A user-friendly fare collection system can make it easier for passengers to use public transportation and reduce boarding times.\n8. Integration with other modes: Integrating public transportation with other modes, such as biking and walking, can provide more options and make it easier for passengers to reach their final destination.\n9. Community engagement: Community engagement is crucial to ensure that the public transportation system meets the needs of the community it serves.\n10. Inclusive pricing: An inclusive pricing policy can make public transportation more accessible to everyone, regardless of their financial means.\nThese are just some of the factors to consider when designing an inclusive and accessible public transportation system.", "answer_id": "7UgtosTTGx7bnRcQGxdDJo", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 13, "text": "Fiscal policy refers to government spending and taxation policies that can be used to influence the economy. Monetary policy refers to the actions of central banks, such as the Federal Reserve, to control the money supply and interest rates. Here are some ways that governments can use these policies to combat economic recessions:\n1. Fiscal policy: Governments can use expansionary fiscal policy to stimulate economic growth during a recession. This can include increasing government spending on public works projects, providing tax cuts for individuals and businesses, and lowering taxes to stimulate borrowing and spending.\n2. Monetary policy: Central banks can use expansionary monetary policy to increase the money supply and lower interest rates. 
This can encourage borrowing and spending, which can stimulate economic growth.\n3. Fiscal policy: Governments can also use automatic stabilizers, which are programs that automatically adjust during an economic downturn. For example, unemployment benefits and food stamps can help support individuals and families during a recession.\n4. Monetary policy: Central banks can also use discretionary policy, which is when they use the tools at their disposal, such as setting interest rates or buying bonds, to try to influence the economy.\n5. Fiscal policy: Governments can also use the use of deficit-financed fiscal stimulus, which is when governments run budget deficits to finance spending and tax cuts to stimulate economic growth.\nIt's important to note that these policies should be used with caution, as they can also lead to inflation and other negative side effects if not used correctly.", "answer_id": "Yt5tqBrrKfcumuuuxf2qqM", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 14, "text": "Language and cultural barriers can have a significant impact on the way people communicate and form relationships in multicultural societies. These barriers can lead to misunderstandings, misinterpretations, and stereotypes, and can make it more difficult for people from different cultures to connect and form meaningful relationships.\nLanguage barriers can make it difficult for individuals to understand and be understood, leading to communication breakdowns and isolation. cultural barriers can make it difficult for individuals to understand and appreciate the beliefs, values, and customs of other cultures, leading to misunderstandings and mistrust.\nBoth language and cultural barriers can also perpetuate stereotypes and prejudices, as individuals may make judgments about others based on limited exposure or understanding of their culture.\nIn order to overcome these barriers, it is important for individuals to actively engage in cross-cultural communication and to be open to learning about other cultures. This can include seeking out language learning opportunities, participating in cultural events and activities, and actively seeking out diverse perspectives and viewpoints.", "answer_id": "4pZ4Uy544Bc3K59fhbW7xj", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 15, "text": "Artificial intelligence (AI) has the potential to revolutionize healthcare by improving the quality and efficiency of healthcare delivery. One such scenario is the use of AI to assist with diagnosis and treatment planning.\nAn AI system could be trained on a large database of medical records and images, allowing it to quickly and accurately diagnose common medical conditions. This would save time and resources for healthcare providers, who could then focus on more complex cases.\nAnother application of AI in healthcare is the use of predictive analytics to identify patients who are at risk of developing certain conditions. For example, an AI system could analyze patient data to identify individuals who are at high risk of developing diabetes, allowing healthcare providers to intervene early and prevent the onset of the disease.\nAI can also be used to streamline administrative tasks, such as scheduling appointments and managing patient records. 
This would free up healthcare providers to spend more time caring for patients, improving patient satisfaction and outcomes.\nIn addition, AI could be used to personalize patient care, tailoring treatments to an individual's specific needs and preferences. This could be done by analyzing a patient's genetic profile, medical history, and other factors to determine the most effective course of treatment.\nOverall, the use of AI in healthcare has the potential to improve patient outcomes while also making healthcare delivery more efficient and cost-effective.", "answer_id": "762peC97upw58WFQeLNoXZ", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 16, "text": "CRISPR-Cas9 is a revolutionary gene editing tool that allows scientists to make precise changes to DNA sequences. The technology is based on the way bacteria defend themselves against viruses, by using a Cas9 protein to cut DNA at a specific location. The CRISPR-Cas9 system consists of a guide RNA that directs the Cas9 protein to the desired location in the genome, and a repair template that helps guide the process of repairing the DNA break.\nHere's how the process works:\n1. The CRISPR-Cas9 system is introduced into the cell, either through viral delivery or direct transfection.\n2. The guide RNA locates the specific DNA sequence that needs to be edited.\n3. Cas9 cuts the DNA at the chosen location.\n4. The cell's own repair machinery then repairs the DNA break. Scientists can provide a repair template, which can either disrupt the function of the gene being edited, or introduce a new mutation.\nThe potential applications of CRISPR-Cas9 technology are vast, including treating genetic diseases, understanding gene function, and creating genetically modified organisms (GMOs). For example, CRISPR-Cas9 could be used to treat genetic diseases by editing out harmful mutations, and to create GMOs that are resistant to certain viruses or have improved nutritional value.\nHowever, CRISPR-Cas9 technology raises ethical concerns, as gene editing can have long-term effects on future generations and could be used for unethical purposes, such as \"designer babies.\" It's important that gene editing is done responsibly and with caution, and that appropriate oversight and regulations are in place.", "answer_id": "Yqfg2saKSNPauCS8YdsjdD", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 17, "text": "Vaccinations work by introducing a harmless piece of a virus or bacteria to the body, which prompts the immune system to produce antibodies to fight the invader. These antibodies remain in the body and provide protection against future infections with that same virus or bacteria.\nHerd immunity is a concept in which a critical mass of individuals within a population are immune to a particular disease, either through vaccination or naturally acquired immunity, so that the disease cannot spread effectively through the community. This is because when a sufficient proportion of people are immune, the virus or bacteria cannot find enough susceptible individuals to infect, and the disease dies out.\nHerd immunity can be achieved either naturally, through the spread of immunity from person to person, or through vaccination, which artificially boosts immunity levels. 
The level of herd immunity required to protect a community depends on the infectiousness of the disease and the susceptibility of the population.\nIt's important to note that not everyone can be immunized, such as infants who are too young to be vaccinated, people with compromised immune systems, and some individuals who experience side effects from vaccines. Herd immunity provides protection to these individuals who are not able to be vaccinated, as well as to the broader community.", "answer_id": "gKd2BPWp7HG9PTDdgS7HT8", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 18, "text": "Social media platforms have had a significant impact on the way people consume and share news. They have made it easier for people to access news from a variety of sources and to share news stories with their social networks. This has led to a greater spread of information and has made it easier for news organizations to reach larger audiences.\n\nHowever, social media platforms have also made it easier for misinformation to spread. Misinformation can range from simple mistakes to deliberate falsehoods spread for political or other purposes. The speed and ease with which information can be shared on social media can make it difficult to fact-check and debunk false information, leading to its widespread acceptance.\n\nThis has led to concerns about the impact of social media on the integrity of elections and other democratic processes. It has also raised questions about the role of these platforms in shaping public discourse and opinions.\n\nIn order to address these concerns, social media platforms have implemented measures such as fact-checking tools and stricter policing of misinformation. However, the effectiveness of these measures is still a subject of debate, and the role of social media in shaping the news ecosystem continues to be a subject of study and concern.", "answer_id": "CyZSkdkGWmjh8ZgxrQ2f66", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 19, "text": "Cultural, social, and economic factors all play a role in shaping people's food choices. For example, cultural tradition, social norms, and personal preferences can influence what foods people choose to eat, while economic factors such as affordability and availability can also play a role.\n\nTo promote healthier diets, it is important to understand these factors and how they can be leveraged to encourage people to make healthier food choices. For example, education campaigns that celebrate cultural and regional foods can help people associate healthy eating with positive cultural identities and traditions. Social media campaigns that encourage people to share healthy recipes and meal ideas can also help to normalize healthy eating behaviors.\n\nIn addition, it can be helpful to make healthy foods more affordable and widely available, particularly in low-income communities. This can be done through initiatives such as farmer's markets, subsidies for fruits and vegetables, and food education programs.\n\nUltimately, promoting healthy eating requires a holistic approach that considers not just individual factors, but also the broader social and economic context in which people make their food choices. 
By understanding and addressing these factors, it is possible to encourage people to adopt healthier diets and lead happier, healthier lives.", "answer_id": "LwA42YWwqvdkStLZEUbmm9", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 20, "text": "Natural selection is the process by which organisms with characteristics that are advantageous for survival and reproduction are more likely to pass those characteristics on to their offspring, while those with characteristics that are not as advantageous are less likely to survive and reproduce. Over time, this leads to the accumulation of beneficial traits in a population and the evolution of new species.\nHere's how natural selection contributes to the evolution and adaptation of species:\n1. Variation: Natural selection relies on genetic variation within a population. Without genetic variation, there would be no variation in traits to select from.\n2. Reproduction: Organisms that have advantageous traits are more likely to survive and reproduce, passing those traits on to their offspring.\n3. Survival: Organisms without advantageous traits are less likely to survival, leading to a lower likelihood of reproduction.\n4. Directional change: Over time, natural selection can lead to a gradual directional change in the traits of a population, as beneficial traits become more common and less advantageous traits become rarer.\n5. Speciation: If the process continues long enough, it can lead to the development of new species, with traits that are specifically adapted to their environment.\nNatural selection plays a key role in shaping the traits of organisms and the evolution of new species over long periods of time. It's one of the mechanisms that explains how the diversity of life on Earth has arisen.", "answer_id": "ajfFPpHDdMiSp3PVfsUFbE", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 21, "text": "My lord, My lady, and esteemed guests, I am humbled to stand before you this evening as a humble knight, sworn to serve and protect this fair kingdom. My name is Sir [insert name], and I hail from [insert home town], where I was raised and trained in the ways of chivalry and knighthood. I have been blessed with the honor of wearing this shining armor, and bearing this trusty sword, which I have wielded in defense of our realm and its people. I am grateful for the opportunity to serve His/Her Majesty, and to stand among so many esteemed knights and ladies this evening. May we always remember our duty to uphold the ideals of honor, bravery, and loyalty, and may we continue to defend this great kingdom for generations to come.", "answer_id": "3n8npKQKxgHEbEmf2K6AwF", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 22, "text": "As a pirate captain, I would motivate my crew to search for hidden treasure by reminding them of the rewards that await us if we are successful. I would speak of the riches and power that we could acquire, and how it would make us the envy of every other pirate sailing the seven seas. I would also remind them of the dangers and challenges that we would face on our quest, and how our bravery and determination would make us the strongest and most feared pirate crew on the ocean. I would paint a vivid picture of the glittering prizes that await us, and how our hard work and perseverance will be rewarded tenfold. And, of course, I would remind them of the punishment that would befall any who hesitated or showed cowardice in the face of danger. 
With these words, I would rally my crew, and set sail with renewed vigor and determination, our eyes fixed on the prize, our hearts pounding with excitement, and our spirits high, ready to conquer the world! Arrr!", "answer_id": "LfeqCy9vR3kCaJiQV4Fyqf", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 23, "text": "As an AI language model, I do not experience emotions or have the capability to love. However, if I were a Shakespearean character, I might deliver a soliloquy something like this:\n\"Oh, fair maiden, with thy radiant smile\nAnd thy eyes like unto emeralds rare,\nMy heart doth beat with love's sweet guile\nFor thee, my own, my love so pure.\n\nThou art the very essence of all that's good\nMy heart doth leap with joy, just to be near\nThy tender touch doth heal my weary soul\nAnd with thy presence, my spirit doth roll\n\nForevermore, let my love be thine\nA bond of love, 'mongst us entwined\nTogether, we shall weather life's storms\nAnd forever, our love shall swell and bloom\n\nThus speaks the heart of thine most humble servant\nPledging his love, his devotion, his allegiance\nMay this love forevermore be entwined\nIn the hearts of thee, my own, my love so rare.\"", "answer_id": "JrnFfmnsuykbTkFbUnei6k", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 24, "text": "As a superhero, my origin story is quite special. It all started when I was just a normal person, living a normal life. But then, one day, I was exposed to a mysterious substance that gave me special abilities.\n\nI discovered that I had incredible strength, agility, and the power of flight. At first, I was scared and didn't know what to do with these abilities. But as I started to explore them more, I realized that I could use them to help others.\n\nI began patrolling the city, looking for ways to use my powers for good. I soon realized that there were many people in need, and I knew that I could make a difference. I started to rescue people from danger, fight against injustice, and protect the innocent.\n\nAs I continued to use my powers for good, I began to realize that I had a bigger purpose. I became a symbol of hope for the people of the city, and they started to look up to me as a hero.\n\nAnd that's how I became a superhero. I may not have been born with these abilities, but I embrace them and use them to make a difference in the world. And I'll continue to use them to protect and help those in need, no matter where life takes me.", "answer_id": "BDBSC5P2DE3E5LuB9FX7tn", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 25, "text": "As a time traveler from the year 3000, I would tell people about many technological advancements that have been made in the future. some of these advancements include:\n\n1. Virtual Reality: Virtual Reality technology has advanced to the point where it is indistinguishable from reality. It has become a ubiquitous form of entertainment, education, and communication.\n2. Artificial Intelligence: AI has become an integral part of daily life, from controlling appliances and vehicles, to assisting with medical diagnoses and research.\n3. space travel: space travel has become much more accessible and affordable, allowing for more frequent missions to space and the exploration of other planets.\n4. Food and Water production: Advanced methods for food and water production have been developed, making it possible to sustainably feed and hydrate the growing population.\n5. 
Transportation: Transportation has become more efficient and clean, with flying cars and hyperloops becoming common modes of transportation.\n6. Medicine: Advances in medicine have led to longer, healthier lives, with cures for many diseases and illnesses.\n7. Energy: New renewable energy sources have been discovered and developed, making energy more sustainable and clean.\n8. Education: Education has become more personalized and accessible, with technology allowing for more effective and engaging learning experiences.\n9. Climate change: New technologies have been developed to help combat climate change, such as carbon capture, and new forms of energy.\n\nThese are just a few examples of the technological advancements that have been made in the year 3000.", "answer_id": "TS97KLnyki7XWYFdSdbZiY", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 26, "text": "\"And it's a championship game to remember folks! With only seconds left on the clock, the underdogs have managed to take the lead! The ball is thrown deep, and the receiver makes an spectacular catch, dodging defenders all the way to the end zone! Touchdown! The crowd goes wild as the underdogs take the lead with only seconds left on the clock! They've pulled off the impossible, and are the champions of the world! Unbelievable play, unbelievable game! The underdogs have done it! They've done it!!\"", "answer_id": "9ATk4tAxBoe267rQUDrgYD", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 27, "text": "Ladies and Gentlemen, I am honored to be here today representing my signature dish. My dish is a culinary masterpiece that combines the finest ingredients from around the world. It is a fusion of flavors that will tantalize your taste buds and leave you longing for more.\n\nMy dish is a juicy and succulent braised short rib, slow cooked to perfection and infused with aromatic spices that lend a warm and comforting flavor. The short rib is served over a bed of fluffy and fragrant risotto, made with Arborio rice and infused with the creamiest of Stocks. The dish is garnished with baby vegetables, saut\u00e9ed mushrooms, and a reduction of pan juices that add a tangy and savory flavor.\n\nMy dish is more than just a meal, it is a culinary experience that will leave you spellbound. The flavors are bold yet harmonious, and every bite will leave you yearning for more. My dish is a reflection of my passion for cooking and my love for good food. It is a dish that I have perfected over many years of dedicated work, and I am proud to share it with you today.\n\nI hope that you will enjoy my dish as much as I have enjoyed creating it. Thank you for your time and consideration.", "answer_id": "C4zL3pUsGj2Z8U7Tm2nzDN", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 28, "text": "As a language model AI, I do not have emotions but here is how one might describe their emotions and the view from the top of Mount Everest:\nReaching the summit of Mount Everest is a monumental achievement and one of the most thrilling moments of a mountain climber's life. The sense of accomplishment and elation is indescribable, as months or even years of training, preparation, and hard work have led to this moment.\nStanding atop the highest peak in the world, the view is breathtakingly beautiful, with panoramic vistas of snow-capped peaks stretching as far as the eye can see. 
The clear, crisp air allows for unobstructed views of the surrounding Himalayan ranges, including Mount Lhotse, Mount Makalu, and other nearby peaks.\nThe descent from the summit can be just as challenging as the ascent, but the feeling of satisfaction and fulfillment makes it all worthwhile. The memory of standing atop Mount Everest will remain with a climber forever.", "answer_id": "CTi8ZRuHoAzRQjMRAU3mBN", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 29, "text": "As a space colonist on Mars, my daily life would be very different from what it is on Earth. I would live in a settlement, along with other colonists, and our days would be filled with a variety of tasks and responsibilities.\nOne of the biggest challenges we would face is the harsh Martian environment. The planet has a much thinner atmosphere and lower gravitational pull than Earth, which would make it more difficult to breathe and move around. We would also have to contend with extreme temperatures, radiation exposure, and dust storms.\nTo survive, we would need to rely on a closed-loop life support system, which would recycle water and air, and we would need to be very efficient with our resources. We would also need to be very careful not to contaminate the Martian environment with Earth microbes, which could have adverse effects on the existing Martian microbial life.\nAnother challenge we would face is the isolation and loneliness of living on another planet. It would be difficult to be cut off from our loved ones and Earth, and we would need to have a strong sense of purpose and drive to keep us going.\nOverall, living on Mars would be a challenging and rewarding experience, but it would require a lot of hard work and dedication. We would need to be self-sufficient and resourceful, and we would need to work together as a team to overcome the obstacles we would face.", "answer_id": "8chhW8gPzM6zHUN6VAqpym", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 30, "text": "\"I am John, and I am one of the few survivors in this post-apocalyptic world. It's been years since the disaster struck, and most of civilization has been destroyed. I was lucky enough to find shelter in an abandoned fallout shelter, and that's how I've been able to survive this long.\n\nI scavenge for food and supplies, venturing out only when absolutely necessary. The world outside is harsh and full of dangers, from raiders looking for loot to harsh radiation levels. But I've managed to find some allies along the way, people who have also managed to survive.\n\nThere's Maria, who lives in a nearby settlement and helps me trade goods. And then there's Jack, who is a skilled hunter and helps me find food. Together, we've been able to form a tight-knit community, and our survival is dependent on each other.\n\nWe've also come across other groups of survivors, some of whom are friendly and some of whom are not. It's a tough world out there, and trust is hard to come by. But we've managed to band together and form a makeshift society, where we can help each other survive and rebuild.\n\nI don't know what the future holds, but I'm grateful for the allies I've encountered in this post-apocalyptic world. 
Together, we're working towards a better tomorrow.\"", "answer_id": "Pxj7ZVsvQ9HMQnRVMh7os4", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 31, "text": "To determine if a restaurant is popular among locals or mainly attracts tourists, you can look for a few key indicators:\n1. parking lot or street parking: if there is a lot of parking right out front, it's likely locals are eating there, while tourists are more likely to drive and park in designated lots or garages.\n2. reservation policy: if the restaurant only takes reservations for parties of 4 or more, it's probably popular with locals who are used to eating there with a group. Tourists, on the other hand, may be more likely to dine solo or with one other person.\n3. menu prices: if the menu features high-end dishes and price points, it's more likely to attract tourists who are used to paying those kinds of prices at home.\n4. local crowd: look at the demographic of the customers eating there. If it's predominantly made up of local retirees, families, or businesspeople, it's likely a local spot.\n5. online reviews: read online reviews to see if locals are mentioning the restaurant in passing or if tourists are raving about it.\nKnowing whether a restaurant is popular among locals or tourists can be useful for several reasons:\n1. Ambiance: Local hangouts tend to have a more relaxed atmosphere, while tourist hotspots may be more bustling and noisy.\n2. Prices: Local joints are often less expensive, while tourist spots may have higher prices.\n3. Recommendations: If you're a local and you're looking for a place that's popular with other locals, you'll want to seek out spots that are frequented by people who live in the area.\n4. Authenticity: Local spots are more likely to serve authentic regional cuisine, while touristy places may cater more to the tastes of out-of-towners.\n5. Crowds: If you're looking to avoid crowds, you may want to steer clear of tourist hotspots.", "answer_id": "NCUhSMqzESRJJVDKHQ7XPB", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 32, "text": "Here are some subtle clues that someone might be pretending to understand a topic or conversation:\n1. Repeating the same points or phrases without adding new information or insights.\n2. Asking simplistic or naive questions that show a lack of understanding of the topic.\n3. Using jargon or technical terms incorrectly or out of context.\n4. Making sweeping generalizations or making claims that are not supported by evidence.\n5. Interrupting or speaking over others, particularly when they are trying to provide more information or clarification.\n6. Using nonverbal cues such as smirking or making dismissive facial expressions while others are speaking.\n7. Avoiding eye contact or fidgeting nervously.\n8. Repeatedly changing the subject or redirecting the conversation away from the topic at hand.\n9. Using deflection or evasion tactics to avoid answering questions or providing straight answers.\n1voir 10. Googling the topic while pretending to know what's going on.\nIt's important to note that not everyone who exhibits these behaviors is necessarily pretending to understand. They might be genuinely confused or unsure, or they might be intimidated or shy about admitting their confusion. 
However, these clues can be red flags that someone is not truly engaging with the conversation or topic at hand.", "answer_id": "mEvCEzVGnENJAMzs2ioTmp", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 33, "text": "There are several reasons why someone might choose to use a paper map or ask for directions instead of relying on a GPS device or smartphone app:\n1. Reliability: GPS devices and smartphone apps can sometimes be unreliable, especially in areas with poor reception or limited cell service. Paper maps are less likely to have signal issues or other technical problems.\n2. Battery life: GPS devices and smartphones can drain batteries quickly, especially when in use for extended periods of time. Paper maps are battery-free and can be used without worrying about running out of power.\n3. Availability: Paper maps are universally available, while GPS devices and smartphone apps may not work properly or be available in certain areas.\n4. Perspective: GPS devices and smartphone apps can lock users into a single perspective or route, while paper maps offer more flexibility and allow users to see surrounding areas and make adjustments as needed.\n5. Personal preference: Some people simply prefer paper maps to GPS devices or smartphone apps, either because they are more familiar with them or because they find them more user-friendly.\n6. Privacy: GPS devices and smartphone apps can track users' movements and whereabouts, while paper maps do not have this capability.", "answer_id": "CoFnjLgW5Eens9JhnpiJHZ", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 34, "text": "There are several ways to determine if a person is genuinely interested in a conversation or simply being polite:\n\n1. Pay attention to their nonverbal cues: If someone is genuinely interested in a conversation, they will likely make eye contact, lean towards you, and express positive body language. On the other hand, if they are being polite, they may not show as much nonverbal engagement.\n2. Listen to their responses: If someone is genuinely interested in a conversation, they will likely ask follow-up questions and show an interest in what you are saying. If they are being polite, their responses may be more general and less engaged.\n3. Observe their level of participation: If someone is genuinely interested in a conversation, they will likely be more active in the conversation, contributing their own thoughts and ideas. If they are being polite, they may be less likely to participate.\n4. Pay attention to their tone of voice: If someone is genuinely interested in a conversation, their tone of voice will likely be more engaged and interested-sounding. If they are being polite, their tone may be more neutral or even bored.\n5. Ask them directly: If you are unsure whether someone is genuinely interested in a conversation or simply being polite, you can ask them directly. For example, you could say something like \"I'm not sure if you're really interested in this conversation or just being polite. 
Can you tell me your honest thoughts?\"\n\nKeep in mind that it can be difficult to always tell if someone is genuinely interested in a conversation or simply being polite, and it's always a good idea to be respectful and considerate in your interactions with others, regardless of their level of engagement.", "answer_id": "aQ9mM2wfmHSge2KvuQ8hmx", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 35, "text": "There are several reasons why someone might prefer to shop at a small, locally-owned business instead of a large chain store, even if the prices are higher:\n1. Supporting the local community: Shopping at a small, locally-owned business directly supports the local economy and the people who live there.\n2. Unique products: Small, locally-owned businesses often offer unique and specialized products that are not available at large chain stores.\n3. Personalized service: Small businesses are often staffed by knowledgeable and friendly employees who are more likely to provide personalized service and attention to their customers.\n4. Sustainability: Small businesses are often more invested in sustainable practices and using environmentally-friendly products, which can be important to some shoppers.\n5. Customer loyalty: Shopping at a small business can foster a sense of loyalty and connection to the community, which can be valued above the cheaper prices offered by large chains.\n6. Preservation of heritage: Small businesses often play a role in preserving local heritage and culture, and their closure can lead to the loss of local identity and character.\nOverall, while price may be a factor for some shoppers, many people are willing to pay a little more to support their local community, receive personalized service, and access unique products.", "answer_id": "eM5S86H3bXTGLQcVW59XsD", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 36, "text": "Assessing the credibility of a source of information is important to ensure that you are basing your beliefs and decisions on reliable and accurate information. Here are some tips on how to assess the credibility of a source without relying solely on the reputation of the author or publisher:\n1. Check the date of the information: Information that is outdated may not be credible, as it may no longer be relevant or accurate.\n2. Look for sources: Look for other sources that support the information in the article or post. If the information is not supported by other sources, it may be less credible.\n3. Evaluate the language: Look for clear, concise, and unbiased language. If the language is sensationalized or emotionally charged, it may be less credible.\n4. Check the author's credentials: Look for authors who are experts in their field and have a track record of reliable information.\n5. Assess the source's bias: Look for sources that are impartial and free from bias. Sources with a clear bias may not provide a balanced view of the issue.\n6. Check the URL: Be wary of sources with URLs that end in \".com\" or other non-reputable extensions. Reputable sources typically use .edu or .gov extensions.\n7. Look for peer review: Look for sources that have undergone peer review, which is a process where experts in the field review and critique the information.\n8. 
Be critical: Be skeptical of information that seems too good (or too bad) to be true, and be critical of any sources that do not stand up to scrutiny.\nBy following these tips, you can critically evaluate the credibility of a source of information and make informed decisions about the information you choose to believe and act on.", "answer_id": "MpBrYa9J2zQy9NGi2dvKp8", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 37, "text": "Some people may enjoy the sensation of being scared because it can be a thrilling and exciting experience. It can also be a way to experience a rush of adrenaline and a feeling of being alive. Additionally, some people may find that being scared can be a way to cope with their own fears and anxieties.\n\nOn the other hand, some people may avoid experiences that scare them because they find them unpleasant or stressful. For these individuals, being scared may activate their fight or flight response, releasing stress hormones such as cortisol and adrenaline that can cause physical and psychological distress. Additionally, some people may have a phobia of certain things that scare them, which can cause them to experience intense fear and anxiety.\n\nIt is also worth noting that some people may be more naturally predisposed to enjoying scary experiences, while others may not. This can be due to a variety of factors, including genetics, personality traits, and life experiences.", "answer_id": "i8QZzVJo2WkTLc7WMv4bNm", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 38, "text": "Observing the behavior of other people in a social situation can provide clues about cultural norms and expectations because culture is communicated and learned through social interactions. People within a culture tend to share common behaviors, norms, and expectations that are learned and passed down through socialization.\nWhen observing social behavior, here are some clues about cultural norms and expectations:\n1. Nonverbal communication: Culture influences nonverbal communication, such as eye contact, gestures, and body language. Differences in nonverbal communication can signal differences in cultural norms and expectations.\n2. Social distance: Culture influences social distance, or how personal space is used and respected. Differences in social distance can signal differences in cultural norms and expectations.\n3. Greetings: Culture influences how greetings are exchanged, such as shaking hands, bowing, or kissing. Greetings can provide clues about cultural norms and expectations.\n4. Interaction patterns: Culture influences how people interact with each other, such as individualistic or collectivistic cultures. Interaction patterns can provide clues about cultural norms and expectations.\n5. Values and beliefs: Culture influences values and beliefs, which can be reflected in social behavior. For example, if a culture values punctuality, people within that culture may be more likely to arrive on time.\nBy observing social behavior, we can gain clues about cultural norms and expectations, and in turn, learn more about the culture and its values, beliefs, and customs.", "answer_id": "HP2CYZ3HJWMcGp6QF9qbq6", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 39, "text": "There is no one answer to this question, as it is a matter of personal opinion and priorities. 
Some people believe that exploring space is a morally praise-worthy goal because it can lead to scientific discoveries, technological advancements, and increased understanding of our universe. Others believe that we have a moral obligation to focus on solving Earth's problems first, as we have the ability and resources to make a positive impact on our own planet and its inhabitants.\nBoth goals are important and can be pursued simultaneously to some extent. Space exploration can bring benefits to the humanity like scientific knowledge, technological advancements, and possibility to find new resources. At the same time, Solving Earth's problems like climate change, poverty, and inequality are also very important and should be given high priority. It's a question of balancing priorities and resources.", "answer_id": "XrMYXEWHff6jXjjJWquXfW", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 40, "text": "In a world where automation is becoming increasingly prevalent, it is important to strike a balance between prioritizing job creation and technological progress. Both job creation and technological progress are important for a thriving society, but they must be balanced to ensure that the benefits of automation are shared equitably.\n\nJob creation: Jobs are essential for individuals to earn a living and support themselves and their families. In a world where automation is displacing jobs, it is important to prioritize job creation so that individuals can continue to support themselves and their families. Job creation can be achieved through policies that encourage businesses to invest in new technologies and hire more workers, as well as through training programs that equip workers with the skills they need to thrive in an automated economy.\n\nTechnological progress: Technological progress is essential for driving innovation and improving productivity, which can lead to higher living standards and economic growth. Automation can lead to increased productivity, which can free up workers to focus on more high-skilled tasks and lead to more efficient and effective production processes. However, it is important to ensure that technological progress is not prioritized at the expense of job creation and social equity.\n\n Balance: In order to strike a balance between job creation and technological progress, it is important to prioritize policies that encourage job creation while also investing in training programs that equip workers with the skills they need to thrive in an automated economy. Additionally, it is important to ensure that the benefits of technological progress are shared equitably so that all members of society can benefit from automation.", "answer_id": "eeS6qNLeX6AScyGrS5pwrc", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 41, "text": "The average human blinks anywhere from 12 to 20 times per minute, which equates to around 2 million blinks per year. The exact number of blinks per lifetime is difficult to determine, as it depends on factors such as age, health, and lifestyle.\nThe frequency of blinking is regulated by the nervous system and is involuntary, meaning that we don't consciously control when we blink. Blinking helps to lubricate the eyes and keep them moisturized, and it also helps to protect the eyes from foreign substances and other potential hazards.\nThe number of blinks per day and per lifetime is influenced by several factors. 
For example, infants blink less than adults because their eyelids are not yet fully developed. As we age, our blinking frequency tends to decrease slightly. Additionally, factors such as computer use or other activities that require prolonged screen viewing can lead to dry eye and increased blinking frequency.\nIn short, the number of blinks per lifetime varies based on factors such as age, health, and lifestyle, and it's difficult to determine an exact number. However, blinking is an important process that helps to keep our eyes healthy and protected.", "answer_id": "an76qPuSJ4TQDWvNXdcJ2w", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 42, "text": "The number of atoms in a grain of salt can vary depending on the size of the grain, but let's assume a typical grain of salt is about 600 micrometers ( micrometers) in diameter, which is a reasonable estimate.\n\n1. First, we need to convert the grain's mass to atoms. the mass of a grain of salt can range from about 100 milligrams (mg) to 300 mg, so let's say it is 200 mg, which is a common mass for a grain of salt.\n2. Next, we need to know the atomic weight of the elements that make up the grain of salt. Table salt, which is NaCl (sodium chloride), has a molar mass of 58.44 g/mol. Since 1 mole is equal to 6.022 x 10^23 atoms or molecules, we can divide the molar mass by Avogadro's number to get the number of atoms in 200 mg of NaCl:\n6.022 x 10^23 atoms/mol / 58.44 g/mol = 1.039 x 10^23 atoms/mg\n3. Now we can multiply the number of moles in 200 mg of NaCl by the number of atoms per mole:\n200 mg x 1039 atoms/mg x 1 mol/ 200 mg = 1.039 x 10^22 atoms\n4. Finally, we can convert this number to micrometers:\n1.039 x 10^22 atoms x ( 1 micrometer)/( 10^-6 m) = 1.039 x 10^20 atoms\n5. So, the number of atoms in a grain of salt (assuming it is 600 micrometers in diameter) is approximately 1.039 x 10^20 atoms.\n\nPlease note that this is just an estimate, and the actual number of atoms in a grain of salt can vary depending on its purity and other factors.", "answer_id": "DYnmt5xTTTkLuyyBxhmDsq", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 43, "text": "The number of lightning strikes that occur on Earth each day is difficult to estimate precisely, as it can vary widely based on location and weather conditions. However, we can make some educated guesses based on average values and known factors that influence lightning activity.\nFirst, let's consider that lightning is a form of electrical discharge that occurs when there is a difference in electrical potential between two points. This most commonly happens during thunderstorms, when the charged water droplets in clouds rub together and create an electrical imbalance.\nNext, we can estimate that on average, a thunderstorm produces about 100 lightning strikes per square mile per year. This number can vary based on the frequency and intensity of thunderstorms in a given region.\nNow, let's consider the Earth's surface area. The planet Earth has a total area of about 197 million square miles. 
-{"question_id": 43, "text": "The number of lightning strikes that occur on Earth each day is difficult to estimate precisely, as it can vary widely based on location and weather conditions. However, we can make some educated guesses based on average values and known factors that influence lightning activity.\nFirst, let's consider that lightning is a form of electrical discharge that occurs when there is a difference in electrical potential between two points. This most commonly happens during thunderstorms, when charged water droplets and ice particles in clouds collide and create an electrical imbalance.\nNext, we can estimate that, in storm-prone regions, lightning activity averages on the order of 100 strikes per square mile per year. This number varies greatly based on the frequency and intensity of thunderstorms in a given region.\nNow, let's consider the Earth's surface area. The planet Earth has a total area of about 197 million square miles. If we multiply this by the number of lightning strikes per square mile per year, we get an upper-bound estimate of about 19.7 billion lightning strikes per year, or roughly 50 million per day.\nHowever, this is a generous upper bound: lightning is heavily concentrated over land and in the tropics, and most of the Earth's surface is ocean, where strikes are far less frequent. Satellite observations put the global total closer to a billion or so strikes per year, which works out to a few million strikes per day.\nFurthermore, some regions, such as equatorial areas, experience high lightning activity throughout the year, while in other regions, like polar areas, lightning activity is low.\nFinally, it's important to note that lightning activity can also be influenced by factors such as global climate change, which could affect the frequency and intensity of thunderstorms in different regions.\nSo, while we can make some educated guesses about how many lightning strikes occur on Earth each day, the actual number is variable; a few million strikes per day is a reasonable ballpark.", "answer_id": "MbcVvnW3Eh9wZi34SzYawn", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 44, "text": "It would take a very large number of balloons to lift a house like in the movie \"Up.\" The movie depicts a house being lifted into the air using a large number of helium balloons, and the exact number of balloons required is never specified. However, we can make an estimate based on some real-world principles and calculations.\n1. Buoyancy: Helium balloons float because helium is less dense than air. As a result, the upward force exerted on the balloons by the surrounding air is greater than the weight of the balloons themselves, causing them to float. In order for a house to float, the total upward force exerted on the balloons would need to be greater than the weight of the house.\n2. Volume: The volume of helium required to lift a house is enormous. Each cubic meter of helium displaces about 1.2 kilograms of air while weighing only about 0.18 kilograms itself, so it provides roughly 1 kilogram of net lift; lifting a house therefore requires a correspondingly huge volume of helium.\n3. Weight of the house: A typical house weighs anywhere from 50 to 100 tons or more, depending on the size and materials used. To lift such a weight, you would need a lot of helium, which would require a lot of balloons.\n4. Balloons: A fairly large helium balloon has a volume of around 1 cubic meter, which provides about 1 kilogram of lift. Using this as a rough estimate, we can calculate the number of balloons needed to lift a house using the following formula:\nNumber of balloons = Weight of house / Lift per balloon\nLet's assume a conservative estimate of 100 tons (about 100,000 kg) for the weight of the house and 1 kilogram of lift for each 1-cubic-meter balloon. To lift the house, you would need:\nNumber of balloons = 100,000 kg / 1 kg per balloon = roughly 100,000 large balloons\nOrdinary party balloons hold only about 0.01 cubic meters of helium each, so with those you would need on the order of 10 million balloons. This is a very rough estimate and does not take into account other factors, such as the shape of the house, the materials it is made of, and the specifications of the balloons used. In reality, it would be practically impossible to use so many balloons to lift a house, as it would require an unimaginable amount of helium and a herculean effort to inflate and tie them all off.\nIn conclusion, it would take an enormous number of balloons to lift a house like in the movie \"Up,\" and it is not practically possible to do so in reality.", "answer_id": "4oSKRo3DtcNtE8gRbrSJPg", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}}
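A rough check of the balloon arithmetic above. This sketch assumes the figures used in the answer (a 100,000 kg house, 1 m^3 balloons, air density about 1.2 kg/m^3, helium about 0.18 kg/m^3, party balloons around 0.01 m^3); the exact values are illustrative assumptions.

```python
# Rough check of the balloon-lift estimate (illustrative only).
house_kg = 100_000
lift_per_m3 = 1.2 - 0.18                  # ≈ 1.02 kg of lift per cubic meter of helium
big_balloons = house_kg / lift_per_m3     # 1 m^3 balloons
party_balloons = house_kg / (lift_per_m3 * 0.01)   # ~0.01 m^3 per party balloon
print(round(big_balloons))    # ≈ 98,000 one-cubic-meter balloons
print(round(party_balloons))  # ≈ 9.8 million party balloons
```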
-{"question_id": 45, "text": "The number of text messages sent globally in a minute fluctuates based on various factors such as time of day, day of the week, and geographic location. However, as of my knowledge cutoff date in September 2019, the estimated number of text messages sent per minute globally was around 16 million.\nHere's my reasoning:\n1. First, let's consider the number of text messages sent per day. As of my knowledge cutoff date, the estimated number of text messages sent per day was around 19 billion.\n2. Next, let's divide that number by the number of minutes in a day. There are 1,440 minutes in a day, so 19 billion messages per day divided by 1,440 minutes gives us an estimate of around 13.2 million text messages per minute.\n3. However, this number is an average and fluctuates based on various factors such as time of day and day of the week. For instance, the number of text messages sent per minute is likely to be higher during peak hours in regions with high mobile usage.\n4. To estimate the number of text messages sent per minute globally, we can use this 13.2 million text messages per minute as a starting point and adjust based on the distribution of mobile usage across different regions.\n5. According to data from the International Telecommunications Union (ITU), the number of mobile cellular subscriptions worldwide was around 5.3 billion as of 2018. Many people have multiple SIM cards or use multiple devices, so the actual number of text messages sent per minute may be higher than this estimate.\n6. Combining these factors, we can estimate that around 16 million text messages are sent globally per minute.\nIt's worth noting that this is just an estimate, and the actual number of text messages sent per minute may be higher or lower depending on various factors such as technological advancements, changes in mobile usage patterns, and other factors that impact mobile communication trends.", "answer_id": "bhh9SFsYgquUPeC2qBKMJw", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 46, "text": "It is difficult to estimate the exact number of words spoken daily on Earth, as it is a constantly changing and fluctuating number. However, there are some factors that can be taken into account to give a rough estimate:\n1. Languages: There are over 7,000 languages spoken around the world, and many of them are spoken daily by a large number of people.\n2. Speech per person: A person speaks at a rate of roughly 100-200 words per minute while talking, and an adult spends about 6-8 hours sleeping, but only a fraction of the remaining 16-18 waking hours is spent speaking; studies of daily word counts suggest an average of somewhere between about 5,000 and 16,000 spoken words per person per day.\n3. World population: As of 2021, the world population is estimated to be around 7.9 billion people.\nCombining these factors, we can estimate that the daily spoken word count is likely in the tens of trillions. 
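A back-of-the-envelope check of the spoken-words estimate above. This is an illustrative sketch assuming roughly 7.9 billion people and 5,000-16,000 spoken words per person per day, as stated in the answer.

```python
# Back-of-the-envelope check of the daily spoken-word estimate (illustrative only).
population = 7.9e9
low, high = 5_000, 16_000          # assumed words spoken per person per day
print(f"{population * low:.1e} to {population * high:.1e} words per day")
# ≈ 4.0e+13 to 1.3e+14, i.e. tens of trillions of words per day
```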
However, it is important to note that this number is a rough estimate and can vary significantly depending on the method of calculation and other factors.\nIt is also worth noting that this estimate does not take into account written communication, such as text messages, emails, and social media posts, which would likely add significantly to the daily word count.", "answer_id": "oKMFEsBBp4SEJiVPRQZN2N", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 47, "text": "It is difficult to give a specific number of snowflakes that fall during a typical winter, as the number can vary greatly depending on factors such as snowfall rate, temperature, humidity, and wind speed. Additionally, individual snowflakes are tiny and far too numerous to be counted one by one.\nHowever, we can give an estimate based on some averages and assumptions.\nA snowflake forms when water vapor in the atmosphere condenses around a particle such as dust, soot, or pollen, and freezes into ice crystals. These ice crystals then bond together to form a snowflake.\nSnowfall rate is usually measured as the depth of snow that accumulates per unit of time; a moderate rate is around 2-5 cm per hour. In terms of individual flakes, that corresponds to something on the order of a few million snowflakes falling per square meter over the course of a snowy day.\nAcross the very large areas covered by a single storm, let alone a whole winter, the total number of snowflakes quickly becomes astronomically large.\nHowever, this is based on several assumptions and averages, and actual numbers can vary greatly depending on location, time of day, and other factors.\nFurthermore, the number is far too large to be counted directly, and the amount of snow that falls during a winter season is better described in terms of snowfall totals, which can range from just a few inches to several feet depending on location and season.", "answer_id": "XXXVQo5w5XCFiEuK6n3hUV", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 48, "text": "It is difficult to give an exact number of pages in all the books ever written because new books are still being published, and there are millions of books in existence. However, based on the number of books published each year and the average page count of a book, we can estimate that there are currently around 130 million distinct books in existence.\nTo break down this estimate, let's assume that the average book has 200 pages. This is a rough figure, but it works as a starting point. We can then multiply the roughly 130 million books by 200 pages per book, which gives us an estimate of about 26,000,000,000 pages (or roughly 26 billion pages).\nHowever, this estimate does not take into account the books that were written but have since been lost or destroyed, which may well outnumber the books that survive today. Additionally, it does not account for the books that will be written in the future.\nTherefore, it is impossible to give an exact number of pages in all the books ever written. The number is constantly changing, and the estimate will always be a rough approximation.", "answer_id": "Ysbx8mFxYva6TbHQGKL8K7", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}}
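A one-line check of the book-pages arithmetic above, using only the figures stated in the answer (about 130 million distinct titles, about 200 pages per book); purely illustrative.

```python
# Check of the book-pages estimate (illustrative only).
titles = 130_000_000
pages_per_book = 200
print(f"{titles * pages_per_book:,}")   # 26,000,000,000 pages, i.e. about 26 billion
```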
-{"question_id": 49, "text": "It is difficult to estimate the exact number of times the Earth has orbited the Sun since the beginning of life, as the exact timing of the start of life on Earth is not known. However, we can make some educated estimates based on certain astronomical events and geological markers.\n\n1. The Earth-Sun orbit: The Earth orbits the Sun once every 365.24 days, at an average distance of about 93 million miles (149.6 million kilometers).\n2. Estimating the age of life on Earth: Scientists estimate that life on Earth began about 3.5 billion years ago.\n3. Asteroid impacts and other geological markers: Asteroid impacts have been a major driver of geological change on Earth throughout its history. The impacts have led to extinctions, changes in climate, and other dramatic shifts in the planet's ecosystems, and they serve as useful markers for dating events in the geological record.\n4. Stepping-stone model: The \"stepping-stone model\" of early life suggests that life began in a series of isolated, oxygen-free locations, such as volcanic vents, and then gradually moved to other locations over time.\n5. Simple calculation: Since one Earth-Sun orbit takes one year, the number of orbits since life began is simply the age of life on Earth expressed in years: about 3.5 billion orbits.\n6. Rounding: Allowing for uncertainty in when life began (roughly 3.5 to 4 billion years ago), we can round this to about 3.5-4 billion orbits.\n\nSo, based on these estimates, the Earth has completed roughly 3.5 to 4 billion orbits around the Sun since the beginning of life. It's important to note that this is an estimate, and the actual number could be different due to the uncertainties in the timing of the start of life and the various astronomical and geological events that have occurred over Earth's history.", "answer_id": "KenbYuKVVUVXe2stVtxkeF", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 50, "text": "It is difficult to estimate the exact number of songs that have been recorded throughout history, as it is a very broad and dynamic category that includes music from many different cultures and time periods. However, we can make some educated guesses based on a few factors.\nFirst, let's consider the number of songs that have been created. This number is likely to be very high, as music has been an important part of human culture for thousands of years, and virtually every culture has its own music traditions. In addition, with the rise of popular music in the 20th century, the number of songs created each year has increased exponentially.\nSecond, let's consider the number of these songs that have been recorded. Recording technology has been around for less than 150 years, so songs created before then survive only as written music or oral tradition, not as recordings. However, recording technology has advanced rapidly in recent decades, making it easier and cheaper to record music. This means that the number of recorded songs has also increased rapidly.\nGiven these factors, it is safe to say that hundreds of millions, perhaps billions, of songs have been created throughout history, and that on the order of a hundred million of them have been recorded. 
However, these numbers are estimates and are difficult to pin down precisely.\nUltimately, the number of songs recorded throughout history is not as important as the impact that music has had on human culture and society. Music has been used for celebrations, rituals, healing, and communication, and has played a crucial role in shaping the identities and experiences of people around the world.", "answer_id": "ceWTK9jYWZq2Dd2H7KGkWY", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 51, "text": "It is difficult to say exactly how the Internet would have been invented during the Renaissance period, as the technological and social conditions were quite different back then. However, we can speculate on some possible outcomes.\n\nFirstly, the Internet would likely have been a more localized technology, with people communicating with each other through a series of interconnected systems that were physically located in the same town or city. This could have led to more face-to-face interactions and communities that were more tightly knit together.\n\nSecondly, the dissemination of information would have been much slower, as the speed of communication would have been limited by the speed of messengers or the delivery of physical documents. This could have led to a different kind of information ecosystem, where people relied more on oral tradition and written documents, and less on real-time updates and instantaneous communication.\n\nThirdly, the Renaissance period was characterized by a great flourishing of art, literature, and science, and the Internet would have likely been shaped by these same cultural currents. This could have led to a more humanistic approach to communication, where the exchange of ideas was central, and technology was used to facilitate the sharing of knowledge and creativity.\n\nOverall, it is impossible to say exactly how the Internet would have been invented during the Renaissance period, but it is likely that it would have been quite different from the global network we have today.", "answer_id": "6Bbzs6YWyzPj52rZAfRPTt", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 52, "text": "The Aztecs were a Mesoamerican civilization that flourished from the 14th to the early 16th century CE, and their capital city Tenochtitlan is believed to have been one of the most populous cities in the world at that time. In 1519 CE, the Spanish conquistador Hern\u00e1n Cort\u00e9s arrived in Mexico, and his forces eventually captured Tenochtitlan in 1521 CE, marking the fall of the Aztec empire.\n\nIf the Aztecs had successfully repelled the Spanish conquistadors, it is difficult to say exactly how history would have unfolded. However, here are a few possible scenarios:\n\n1. The Aztecs may have continued to thrive as a civilization, perhaps even expanding their empire further.\n2. The Aztecs may have made contact with other civilizations, such as those in South America, and may have had a profound impact on the course of history in the Americas.\n3. The Aztecs may have been able to resist the spread of diseases, such as smallpox, which decimated their population following contact with the Spanish.\n4. The Aztecs may have been able to maintain their traditional beliefs and way of life, without the same level of influence from European powers.\n5. 
The Aztec empire may have been able to maintain its status as a major player in world affairs, and may have had a lasting impact on the course of history.\n\nIt is important to note that any alternate history is speculative, and we can only imagine how events may have played out differently.", "answer_id": "EG6h5jgpQ8wesKAX43nt9X", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 53, "text": "The Black Death, also known as the Plague, was a pandemic that devastated Europe in the 14th century, killing an estimated one-third or more of the population. If the Black Death had not occurred, it is difficult to predict exactly how history would have unfolded, but here are a few potential consequences:\n\n1. Demographic: The Black Death had a profound impact on demographics, leading to a reduction in population size. Without the Black Death, Europe's population may have continued to grow, leading to a different balance between rural and urban areas, and potentially leading to more technological and social advancements.\n2. Economic: The Black Death led to a major disruption in trade and commerce, as well as a decline in labor supply. Without the Black Death, the economy may have continued to grow and evolve, leading to different political and social structures.\n3. Political: The Black Death led to a redistribution of power away from feudal lords and towards urban centers. Without the Black Death, feudalism may have continued to dominate European society, leading to different political and social structures.\n4. Social: The Black Death led to a reevaluation of traditional social structures and beliefs. Without the Black Death, society may have continued to evolve along traditional lines, leading to different cultural and religious practices.\n5. Medical: The Black Death led to a major advance in medical knowledge and practices, as people sought to understand and treat the disease. Without the Black Death, medical knowledge and practices may have continued to lag behind, leading to different health outcomes.\n\nOverall, it is difficult to predict exactly how history would have unfolded if the Black Death had not occurred. However, it is clear that the Black Death had a profound impact on European society and culture, shaping the course of history in ways that still reverberate today.", "answer_id": "J76ZgE27N2Pe77NcEBrWUV", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 54, "text": "Isaac Newton is best known for his groundbreaking work in physics, particularly his laws of motion and universal gravitation. However, he also made important contributions to other fields, including mathematics and optics, and he was deeply involved in the theological controversies of his time. Had Newton focused on biology instead of physics, it is difficult to say exactly what he would have accomplished in that field, but some possibilities include:\n1. Plant biology: Had Newton turned his experimental methods to botany, he might have made significant contributions to our understanding of plant biology, perhaps even developing early theories on photosynthesis or plant growth.\n2. Animal behavior: Applying his gift for careful observation to animals, he might have made significant contributions to our understanding of animal behavior.\n3. 
Evolution: Newton died more than a century before Charles Darwin published his theory of evolution, so he wouldn't have made direct contributions to that theory. However, had he focused on biology, he might have made important observations that could have paved the way for Darwin's work.\n4. Medicine: Newton was fascinated by the human body and its biomechanics. He might have made more significant contributions to medicine had he focused on biology, perhaps developing theories on human physiology or disease transmission.\n\nWhile we can only speculate on what Newton might have accomplished had he focused on biology instead of physics, his contributions to science as a whole would still have been significant.", "answer_id": "hoFFABFFF7J8juv3AnoVWZ", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 55, "text": "The Beatles were one of the most influential and successful bands in music history, so it's difficult to imagine what the music industry would have looked like if they had never existed. However, here are a few possible scenarios:\n1. The British Invasion may have still happened, but it could have been spearheaded by another band, such as The Rolling Stones or The Who.\n2. The 1960s counterculture movement may have been shaped differently without the Beatles' music as a soundtrack.\n3. The way music was recorded and produced could have been different, as the Beatles were known for their innovative use of studio technology.\n4. The popularity of rock music may have been slowed or even derailed without the Beatles' influential music.\n5. Other bands and artists may have been influenced differently, leading to changes in musical genres and styles.\n6. The Beatles' breakup in 1970 could have had a major impact on the music industry, as they were one of the highest-selling bands in history at the time.\n7. The Beatles' solo careers may have been different without the success and influence of the band, and individual members may have pursued different projects.\nOverall, it's difficult to say exactly how the music industry and popular culture would have been affected without the Beatles, but their impact was undeniable and they are still loved and celebrated by fans around the world.", "answer_id": "8R5n2uZ4oq52QkWQsSS6No", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 56, "text": "If Alan Turing had not cracked the Enigma code during World War II, it is possible that the outcome of the war could have been different. The Enigma code was used by the Germans to encrypt their most sensitive military communications, and Turing's work at Bletchley Park in Britain allowed Allied codebreakers to decrypt German messages and gain valuable strategic insights.\nWithout Turing's codebreaking work, the Allies may have struggled to keep pace with German military movements and plans, which could have led to different outcomes in key battles and campaigns. 
This could have weakened the Allies' ability to respond to German aggression, and the war could have dragged on longer or taken a different trajectory.\nHowever, it's also important to note that codebreaking was just one aspect of the Allies' efforts during World War II, and other factors such as the production of tanks, planes, and other military hardware, as well as Allied strategic decisions, also played critical roles in the outcome of the war.", "answer_id": "ECtu3QVXVrNhssSyktKzkq", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 57, "text": "The Suez Canal is a man-made waterway that connects the Mediterranean Sea with the Red Sea, allowing ships to travel between Europe and Asia without having to go all the way around Africa. If the Suez Canal had never been constructed, ships would have had to sail around the Cape of Good Hope at the southern tip of Africa, which would have added many days and potentially weeks to the journey between Europe and Asia.\nThis would have had significant consequences for international trade, as goods would have taken longer to reach their destinations and would have been more expensive due to the increased fuel costs. It would also have had an impact on the global economy, as the ease of trade between Europe and Asia would have been severely limited.\nAdditionally, the Suez Canal has played an important role in military operations in the region, with several conflicts being fought near or on the canal. If the Suez Canal had never been built, the political landscape of the Middle East could have been very different, and the balance of power between different countries in the region could have been altered.\nOverall, the Suez Canal has had a profound impact on the world, both in terms of trade and geopolitics. Its construction has made trade between Europe and Asia much easier and cheaper, and it remains an important strategic site to this day.", "answer_id": "jb8ot3ucdF3RvzApEYKft6", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 58, "text": "It is difficult to say with certainty how the world would be different if the Maya civilization had never mysteriously collapsed, as there were many factors that shaped the course of history. However, here are a few possible scenarios:\n1. The Maya may have continued to thrive: The Maya civilization was known for its advanced astronomy, mathematics, and calendar systems. They also had a sophisticated agricultural system that allowed them to flourish in a harsh environment. If the civilization had not collapsed, it is possible that the Maya would have continued to develop these and other technologies, leading to further advancements and cultural flourishing.\n2. The region may have been more politically stable: The collapse of the Maya civilization led to a period of political instability in the region. If the Maya had not collapsed, it is possible that their political system could have been more resilient and could have helped to maintain political stability in the region.\n3. The Maya may have made contact with other civilizations: The Maya had contact with other civilizations, such as the Aztecs and the Incas, but their collapse may have limited further contact and exchange of ideas. If the Maya civilization had not collapsed, they may have been able to make more contact with other civilizations and potentially influence their development.\n4. 
The world may have been different culturally: The Maya civilization was known for its rich cultural traditions, including its art, architecture, and religious practices. If the civilization had not collapsed, it is possible that these cultural traditions would have continued to evolve and influence other civilizations in the region.\n5. The world may have been different environmentally: The Maya civilization was known for its agricultural practices, which included farming and land management techniques that allowed them to thrive in a harsh environment. If the civilization had not collapsed, it is possible that their agricultural practices could have influenced the way that other civilizations managed their resources and impacted the environment differently.", "answer_id": "cAjR8aDWRKVKbxKeqRRgvW", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 59, "text": "If Christopher Columbus had not discovered the Americas, the indigenous peoples of the Americas would have continued to live and thrive in their own cultures and societies without significant outside contact. The impact of Columbus's arrival would have been avoided, and the Americas would have developed differently without European colonization.\nThe Americas before Columbus were home to many different civilizations and cultures, each with their own unique customs, languages, and way of life. These civilizations had developed independently of each other for thousands of years and had a rich and diverse history. Without Columbus's discovery, these civilizations would have continued to flourish on their own, without outside influence.\nThe impact of Columbus's arrival had far-reaching consequences, including the introduction of diseases like smallpox, which decimated native populations, and the enslavement and forced labor of indigenous peoples. The colonization of the Americas also led to the displacement and genocide of indigenous peoples, and the theft of their lands and resources.\nWithout Columbus's arrival, it is possible that indigenous peoples would have continued to thrive and develop their own cultures and societies, and the history of the Americas would have been very different.", "answer_id": "7hKzFRQWWPVbr8H4wcbGdA", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 60, "text": "It is difficult to say exactly what would have happened if Vincent van Gogh had been a successful artist during his lifetime, as there are many factors that could have influenced his career and life. However, some possible outcomes include:\n1. Van Gogh may have been able to sell more paintings and achieve financial stability while he was still alive. This could have relieved some of the financial burden he faced during his lifetime, and may have allowed him to pursue his art more freely.\n2. Van Gogh may have been more widely known and recognized for his artistic talent during his lifetime, which could have led to more recognition and critical acclaim. This could have also influenced the direction of his artistic career, as he may have been more confident in his style and subjects.\n3. Van Gogh may have been able to connect with other artists and movements more easily, as he may have been more financially and professionally secure. This could have led to more collaboration and cross-pollination of ideas, which could have influenced the direction of his art.\n4. 
Van Gogh's life may have been different in other ways as well, such as where he lived, who he associated with, and how he was remembered after his death. Ultimately, it is impossible to know exactly how van Gogh's life would have been different if he had been a successful artist during his lifetime, but it is clear that his art and legacy have had a profound impact on the world.", "answer_id": "3vMtYzSrV5ARpYA9fGUe4H", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 61, "text": "Here is a simple C++ program that reads a text file line by line and counts the number of occurrences of a specific word in the file:\n\n```\n#include <iostream>\n#include <fstream>\n#include <sstream>\n#include <string>\nusing namespace std;\n\nint main()\n{\n    ifstream file(\"filename.txt\");\n    if (!file)\n    {\n        cerr << \"Error opening file\" << endl;\n        return 1;\n    }\n    string target = \"word\"; // the word to count\n    string line, token;\n    int count = 0;\n    while (getline(file, line))\n    {\n        istringstream iss(line);\n        while (iss >> token)\n        {\n            if (token == target)\n            {\n                count += 1;\n            }\n        }\n    }\n    cout << \"Occurrences: \" << count << endl;\n    return 0;\n}\n```\n\nIn this program, we first include the necessary header files: iostream, fstream, sstream, and string. We open the file with an ifstream and check that it opened successfully, printing an error message and returning 1 if it did not. The variable count keeps track of the number of occurrences of the target word. The outer while loop reads the file line by line using getline(), and the inner loop splits each line into whitespace-separated words using an istringstream, incrementing count whenever a word matches the target. Finally, we print the total and return 0 to indicate successful execution; the ifstream closes the file automatically when it goes out of scope.", "answer_id": "nvbTnFLEyPSSgPdf2n6Mqp", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}}
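A small Python cross-check for the word-counting answer above, useful for verifying the C++ program's output on a test file. The file name "filename.txt" and the target word "word" are the same placeholders used in the answer, not real inputs; this is an illustrative sketch only.

```python
# Cross-check for the C++ word-count program (illustrative; placeholder file/word).
# Counts whitespace-separated tokens that exactly equal the target word.
target = "word"
with open("filename.txt", encoding="utf-8") as f:
    count = sum(token == target for line in f for token in line.split())
print(f"Occurrences: {count}")
```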
-{"question_id": 62, "text": "Here's a Python implementation of the dynamic programming approach to find the longest common subsequence (LCS) of two input strings:\n\n```python\ndef lcs(str1, str2):\n    m, n = len(str1), len(str2)\n    # dp[i][j] holds the length of the LCS of str1[:i] and str2[:j]\n    dp = [[0] * (n + 1) for _ in range(m + 1)]\n    for i in range(1, m + 1):\n        for j in range(1, n + 1):\n            if str1[i - 1] == str2[j - 1]:\n                dp[i][j] = dp[i - 1][j - 1] + 1\n            else:\n                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])\n    return dp[m][n]\n\n# Example usage\nstr1 = \"abcbdab\"\nstr2 = \"bdcaba\"\nprint(lcs(str1, str2))  # 4\n```\n\nThe `lcs` function takes two strings `str1` and `str2` as input and returns the length of the longest common subsequence between them. It uses dynamic programming to build a 2D list `dp` of size `(m+1) x (n+1)`, where `m` and `n` are the lengths of `str1` and `str2`, respectively. The first row and column of `dp` are initialized to `0`, since the LCS of an empty string with anything is empty. Then, for each pair of positions `i` and `j`, the function fills in `dp[i][j]`: if the characters `str1[i-1]` and `str2[j-1]` match, the value is `dp[i-1][j-1] + 1`; otherwise it is the maximum of `dp[i-1][j]` and `dp[i][j-1]`. The final length of the LCS is stored in `dp[m][n]`; in the example, the LCS of \"abcbdab\" and \"bdcaba\" has length 4 (for instance \"bcba\").", "answer_id": "RS2roGuiWf5jZfncgfHPeG", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 63, "text": "Here's an example of how you can implement a regular expression to validate an email address in Python:\n\n```python\nimport re\n\nemail_pattern = r\"^[a-zA-Z0-9._-]+@[a-zA-Z0-9._-]+\\.[a-zA-Z]+$\"\n\nemail = input(\"Enter an email address: \")\n\nif re.match(email_pattern, email):\n    print(\"Valid email address.\")\nelse:\n    print(\"Invalid email address.\")\n```\n\nIn this example, the regular expression `^[a-zA-Z0-9._-]+@[a-zA-Z0-9._-]+\\.[a-zA-Z]+$` is used to match email addresses. The regex checks for:\n\n* A string that starts with one or more characters that are letters, digits, periods, underscores, or hyphens: `^[a-zA-Z0-9._-]+`\n* Followed by an @ symbol\n* Followed by one or more characters that are letters, digits, periods, underscores, or hyphens (the domain name): `[a-zA-Z0-9._-]+`\n* Followed by a period\n* Followed by a string that contains only letters (the top-level domain, e.g. com or org): `[a-zA-Z]+$`\n\nYou can use either `re.match()` or `re.search()` here: `re.match()` only matches at the beginning of the string, while `re.search()` looks for the pattern anywhere in the string. Because the pattern is anchored with `^` and `$`, both will accept only a string that is entirely a valid email address. Note that this is a simplified pattern and does not cover every address allowed by the full email specification.", "answer_id": "Rz8sSwJzffBjUhjEYAyEWS", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 64, "text": "Here's an example program in Python that finds the nth Fibonacci number using dynamic programming (memoization):\n\n```python\ndef fibonacci(n, memo=None):\n    # memo caches already-computed Fibonacci numbers\n    if memo is None:\n        memo = {0: 0, 1: 1}\n    if n not in memo:\n        memo[n] = fibonacci(n - 1, memo) + fibonacci(n - 2, memo)\n    return memo[n]\n\n# Example usage\nprint(fibonacci(10))  # 55\n```\n\nIn this program, we define a function `fibonacci` that takes an integer `n` and a dictionary `memo` as its arguments. The `memo` dictionary is used to store previously computed Fibonacci numbers, so that we don't have to calculate them again. The base cases `fibonacci(0) = 0` and `fibonacci(1) = 1` are placed in `memo` up front. For any other `n`, the function first checks whether the value is already cached; if not, it computes it recursively as the sum of the two preceding Fibonacci numbers and stores the result in `memo` before returning it. Thanks to memoization, each Fibonacci number is computed only once, so the running time is linear in `n` rather than exponential. To use this function, we just need to call `fibonacci(n)` with an appropriate value for `n`. For example, `fibonacci(10)` returns the 10th Fibonacci number, 55.", "answer_id": "SQCnPwtYgHH57ikbXgUpQq", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}}
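A quick way to sanity-check the memoized `fibonacci` above against a simple iterative loop. This is an illustrative test only and assumes the `fibonacci` function from the answer above is in scope.

```python
# Sanity check: memoized fibonacci() vs. an iterative reference (illustrative only).
def fib_iter(n):
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a

assert all(fibonacci(n) == fib_iter(n) for n in range(30))
print("ok")  # both implementations agree for n = 0..29
```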
-{"question_id": 65, "text": "A binary search algorithm is a search algorithm that works by repeatedly checking the middle element of a shrinking portion of a sorted array until the target element is found. Here is a step-by-step description of a binary search to find a specific element in a sorted array:\n1. Set low to the first index of the array and high to the last index.\n2. Compute the middle index between low and high.\n3. Compare the element at the middle index with the target element.\n4. If they match, return the middle index.\n5. If the middle element is smaller than the target element, search the right half by moving low just past the middle.\n6. If the middle element is larger than the target element, search the left half by moving high just below the middle.\n7. Repeat steps 2-6 until the target element is found or low exceeds high (the search range is empty).\nHere is an example implementation in Python:\n\n```python\ndef binary_search(arr, target):\n    low = 0\n    high = len(arr) - 1\n    while low <= high:\n        mid = (low + high) // 2\n        if arr[mid] == target:\n            return mid\n        elif arr[mid] < target:\n            low = mid + 1\n        else:\n            high = mid - 1\n    return -1\n\n# Example usage\narr = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\ntarget = 5\nprint(binary_search(arr, target)) # prints 4\nprint(arr[4]) # prints 5\n```\n\nIn this example, the array is [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], and we are looking for the element 5. The binary search algorithm repeatedly halves the search range, compares the middle element with the target element, and continues to narrow down the search until the target element is found at index 4.", "answer_id": "mfGSLxAAjjBmUFtCx3STEN", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}} -{"question_id": 66, "text": "A queue can be implemented using two stacks in Python, with one stack receiving newly enqueued elements and the other stack serving dequeues in FIFO order. Here is an example implementation:\n\n```python\nclass Queue:\n    def __init__(self):\n        self.in_stack = []   # receives enqueued elements\n        self.out_stack = []  # serves dequeues in FIFO order\n\n    def enqueue(self, element):\n        self.in_stack.append(element)\n\n    def _shift(self):\n        # Move elements only when out_stack is empty, reversing their order\n        if not self.out_stack:\n            while self.in_stack:\n                self.out_stack.append(self.in_stack.pop())\n\n    def dequeue(self):\n        self._shift()\n        return self.out_stack.pop()\n\n    def peek(self):\n        self._shift()\n        return self.out_stack[-1]\n\n    def is_empty(self):\n        return not self.in_stack and not self.out_stack\n```\n\nIn this implementation, the `enqueue` method simply pushes the element onto `in_stack`, while `dequeue` and `peek` first call `_shift`, which, when `out_stack` is empty, pops everything from `in_stack` onto `out_stack`. This reverses the order of the elements, so the oldest element ends up on top of `out_stack` and is returned first, giving FIFO behavior. The `is_empty` method checks whether both stacks are empty.\nA few notes on this implementation:\n1. Python lists are used as stacks, with `append` as push and `pop` as pop.\n2. `enqueue` is always O(1).\n3. `dequeue` and `peek` are O(1) amortized: each element is moved from `in_stack` to `out_stack` at most once, although a single call can take O(n) when the shift happens.\n4. Calling `dequeue` or `peek` on an empty queue raises an `IndexError`, just like popping from an empty list.", "answer_id": "CcUgvJProVma9sNKW2nKHc", "model_id": "vicuna-13b:20230322-new-hp-fp16", "metadata": {}}
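A brief usage example for the two-stack `Queue` above, showing the FIFO behavior. Illustrative only; it assumes the `Queue` class from the answer above is in scope.

```python
# Usage example for the two-stack Queue (illustrative only).
q = Queue()
for item in [1, 2, 3]:
    q.enqueue(item)
print(q.dequeue())   # 1  (first element in is first out)
q.enqueue(4)
print(q.peek())      # 2
print(q.dequeue())   # 2
print(q.is_empty())  # False (3 and 4 are still queued)
```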
-{"question_id": 67, "text": "Here is one way to implement a program to find the common elements in two arrays without using any extra data structures in C++:\n\n1. Accept the two arrays as input from the user.\n2. Initialize two empty arrays to store the common elements.\n3. Loop through the first array and check if each element exists in the second array. If it does, then move that element from the second array to the first array.\n4. Loop through the second array and check if each element exists in the first array. If it does not, then move that element from the first array to the second array.\n5. Print out the elements in the first array as the common elements between the two arrays.\n\nHere is some sample code for this program:\n```\n#include <iostream>\n#include <cstring>\n#include <algorithm>\n\nusing namespace std;\n\nint main() {\n    // Accept the two arrays as input from the user\n    int size1, size2;\n    cout << \"Enter size of first array: \";\n    cin >> size1;\n    int arr1[size1];\n    cout << \"Enter elements of first array: \";\n    for(int i=0; i<size1; i++) {\n        cin >> arr1[i];\n    }\n    cout << \"Enter size of second array: \";\n    cin >> size2;\n    int arr2[size2];\n    cout << \"Enter elements of second array: \";\n    for(int i=0; i<size2; i++) {\n        cin >> arr2[i];\n    }\n\n    // Initialize two empty arrays to store the common elements\n    int common[min(size1, size2)];\n    int second[min(size1, size2)];\n    memset(common, 0, sizeof(common));\n    memset(second, 0, sizeof(second));\n\n    // Loop through the first array and check if each element exists in the second array\n    for(int i=0; i