- Install docker
sudo apt install docker.io
- Install poetry
pip install poetry
- Create a docker file like this
example
- Create a pyproject.toml file
- Add dependencies there e.g.
poetry add Flask==2.1.2
and set the directory name: name = "poetry_docker"
- Run
poetry lock
which creates a poetry.lock
file - Run
poetry install
- Build the docker image:
docker build -t docker_test:0.0.1 .
- Run the docker image:
docker run -p 5000:5000 -t -i docker_test:0.0.1
- Check docker images:
docker images
- Push image to the docker hub:
docker push USER_NAME/docker_test:0.0.1
- Pull image from the docker hub:
docker pull USER_NAME/docker_test:0.0.1
-
docker login kidev.azurecr.io
-
docker build -t USER_NAME/kicamp_frontend:0.0.1 .
-
docker tag USER_NAME/kicamp_frontend:0.0.1 kidev.azurecr.io/USER_NAME/kicamp_frontend:0.0.1
-
docker push kidev.azurecr.io/USER_NAME/kicamp_frontend:0.0.1
-
poetry shell
-
pre-commit run --all-files
-
pre-commit run --files [chunking.py, queue_data.py]
-
.pre-commit-config.yaml
-
pip install fastapi
-
pip install uvicorn
-
uvicorn restapi:app --reload
-
uvicorn backend.api.restapi:app --reload
-
uvicorn main:app --port 8000
- activate the .venv environment
.venv\Scripts\activate
-
pip install notebook
-
python -m pip uninstall pyzmq
-
pip install pyzmq==25.1.2
-
import os
-
from dotenv import load_dotenv
-
load_dotenv()
-
GCP_PROJECT_ID = os.getenv('GCP_PROJECT_ID')
-
pip list
-
pip show gensim
-
pip freeze
-
pip check
-
pip install -r requirements.txt
-
pip install --quiet --upgrade gensim
-
pip install git+https://github.com/Unbabel/COMET.git
-
git add .
-
git add [file]
-
git reset [file]
-
git diff
-
git diff branchB...branchA
-
git commit -m "[descriptive message]"
-
git init
-
git branch [branch-name]
-
git status
-
git checkout [-b][branch_name]
-
git checkout -- [file]
-
git merge [branch]
-
git log [-n count]
-
git show [SHA]
-
git push [alias] [branch]
-
git fetch [alias]
-
git rebase [branch]
-
git pull [alias]
-
git reset --hard [commit]
-
git stash
-
cat <<EOF > .gitignore
-
git commit -m "feat: ..."
,fix:
,docs:
,refactor:
,test:
,style:
,chore:
-
python3 --version
-
python3 -m venv py_env
-
source py_env/bin/activate
-
ls
-
ls -a
shows hidden files and directories in the current directory. -
ls -l
displays the contents of the current directory in a long listing format -
ls -t
displays the most recently modified file first as the file -
pwd
-
cd
-
cd ..
-
cd ~-
-
sudo apt install gimp
-
chmod +x file_name.sh
-
cat long_text_file.txt
-
nano file.txt
-
ping google.com
-
ssh salehi@server_name.de
-
source venv/bin/activate
-
cp file_to_copy.txt new_file.txt
-
cp -r dir_to_copy/ new_copy_dir/
-
scp -r ~/workspace/file.txt salehi@server_name.de:~/practice
-
rm file_to_copy.txt
-
rm -rf Ego_Twitter
-
rm -r dir_to_remove/
-
mv command_list.txt commands/
-
mv /home/kinsta/BestMoviesOfAllTime ./
-
mkdir images/
-
mkdir -p movies/2004/
-
touch -c file.txt
-
touch -m file.txt
-
exit
-
sudo -i   # 'sudo cd /root/' cannot work: cd is a shell builtin, so open a root shell instead
-
htop
-
unzip images.zip
-
ps
reports a snapshot of the current processes. -
history
-
passwd
-
whoami
-
wget https://github.com/fatemehsrz/MLflow_Tutorial/blob/main/MLflow_Tutorial.ipynb
-
curl -fsSL nebula-up.siwei.io/install.sh | bash
-
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
-
sudo apt install openssh-server
-
sudo systemctl status ssh
-
sudo ufw allow ssh
-
ip a
-
ip -4 a
-
ping -c 4 www.google.com
-
shutdown
import logging
from abc import ABC, abstractmethod


class DataPipeline(ABC):
    """Abstract base class for ingestion pipelines.

    Configures root logging to a file on construction; subclasses must
    implement ``fetch_documents`` and may call ``super()`` to log success.
    """

    def __init__(self) -> None:
        # Log INFO and above; filemode="w" truncates the log on every run.
        logging.basicConfig(
            level=logging.INFO,
            filename="ingestionpipelinelogger.log",
            filemode="w",
            format="%(asctime)s %(levelname)s %(message)s",
        )
        self.logger = logging.getLogger()

    @abstractmethod
    def fetch_documents(self) -> None:
        """Fetch source documents. Subclasses implement the actual fetching."""
        # Fixed typo in the log message ("sucessfully" -> "successfully").
        self.logger.info("[+] fetched documents successfully")
Pydantic is a powerful data validation library for Python, engineered to enhance the robustness and reliability of the codebase. import pydantic
from datetime import date
from enum import Enum
from uuid import UUID, uuid4

from pydantic import BaseModel, EmailStr, Field


class Department(Enum):
    """Closed set of valid departments."""

    HR = "HR"
    SALES = "SALES"
    IT = "IT"
    ENGINEERING = "ENGINEERING"


class Employee(BaseModel):
    """Validated employee record.

    ``employee_id`` uses ``Field(default_factory=uuid4)`` so each instance
    gets a fresh UUID. A plain ``= uuid4()`` default would be evaluated once
    at class-definition time and shared by every Employee created.
    """

    employee_id: UUID = Field(default_factory=uuid4)
    name: str
    email: EmailStr
    date_of_birth: date
    salary: float
    department: Department
    elected_benefits: bool
# Easy reversing
phrase: str = 'Hello, Bob!'
# Joining the reversed character iterator yields the same result as phrase[::-1].
''.join(reversed(phrase))
# If statement in return
def valid_length_elvis(user_input: str) -> str:
    """Return 'Yes case' when the input exceeds 10 characters, else 'No case'."""
    if len(user_input) > 10:
        return 'Yes case'
    return 'No case'
# Nested list flatten
from typing import Callable, Any


def flatten(target: list) -> list:
    """Recursively flatten arbitrarily nested lists into one flat list.

    Rewritten from a lambda bound to a name (PEP 8 E731) into an explicit
    def; also avoids the quadratic ``sum(..., [])`` list concatenation.
    """
    flat: list = []
    for sub in target:
        if isinstance(sub, list):
            flat.extend(flatten(sub))  # recurse into nested sublists
        else:
            flat.append(sub)
    return flat
# Generate a secret password
from secrets import choice
from string import ascii_letters, digits, punctuation

# Hoisted: build the candidate alphabet once instead of per character.
_PASSWORD_ALPHABET = ascii_letters + digits + punctuation


def pass_gen(x: int) -> str:
    """Return a cryptographically secure random password of length *x*.

    Uses ``secrets.choice`` (not ``random``) for security-sensitive output.
    Rewritten from a lambda bound to a name (PEP 8 E731).
    """
    return ''.join(choice(_PASSWORD_ALPHABET) for _ in range(x))
# Get email from content
import re

# Original snippet was broken: the raw string was split across two lines
# (SyntaxError) and `re` was never imported. Also fixed the character class
# `[A-Z|a-z]`, where `|` was a literal pipe, to `[A-Za-z]`.
_EMAIL_RE = re.compile(r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b')


def get_emails(text: str) -> list:
    """Return every email-like substring found in *text*."""
    return _EMAIL_RE.findall(text)
# map() applies str.upper to each element; list() materializes the result.
upper_case = list(map(lambda x: x.upper(), ['apple', 'banana', 'cherry']))

# str.join needs an existing word list — `words` was undefined in the
# original snippet and raised NameError.
words = ['join', 'these', 'words']
sentence = ' '.join(words)

# Three equivalent ways to print a float with two decimal places
# (`a` was also undefined in the original snippet).
a = 3.14159
print("%.2f" % a)
print("%.2f" % round(a, 2))
print("{:.2f}".format(a))
from utils import TextLoader
import argparse

# Load the emotion dataset via the project TextLoader and run the action
# chosen on the command line: "cleantext" cleans the Text column,
# "savefile" cleans it and then persists the dataframe to CSV.
data_utils = TextLoader(input='./data/emotion_dataset.csv', device='cpu')

parser = argparse.ArgumentParser(description="load data")
parser.add_argument("action_type", help="action_type")
args = parser.parse_args()

if args.action_type in ("cleantext", "savefile"):
    # Both actions share the load-and-clean step (was duplicated verbatim
    # in two separate if-branches); only "savefile" persists the result.
    df = data_utils.load_data()
    df["Text"] = df["Text"].apply(data_utils.clean_text)
    if args.action_type == "savefile":
        data_utils.save_csv(df)
$workspaceFolder
├── backend
│ └── my_package
│ ├── __init__.py
│ └── classes.py
└── test
└── test_azureblob.py
{
"version": "0.2.0",
"configurations": [
{
"name": "Python: Module",
"type": "python",
"request": "launch",
"module": "test_azureblob",
"env": {"PYTHONPATH": "${workspaceFolder}/../backend"}
}
]
}