Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

👻 Config changes #526

Merged
merged 16 commits into from
Jan 13, 2025
Merged
Show file tree
Hide file tree
Changes from 14 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 20 additions & 7 deletions .github/workflows/build-and-push-binaries.yml
Original file line number Diff line number Diff line change
Expand Up @@ -189,23 +189,36 @@ jobs:
cp ./dist/kai-rpc-server ./example/analysis/
cp ./kai_analyzer_rpc/kai-analyzer-rpc ./example/analysis/

cat << EOF > ./example/config.toml
rm -f ./example/initialize.toml

cat << EOF > ./example/initialize.toml
root_path = "./coolstore"
analyzer_lsp_java_bundle_path = "./analysis/bundle.jar"
analyzer_lsp_lsp_path = "./analysis/jdtls/bin/jdtls"
analyzer_lsp_rpc_path = "./analysis/kai-analyzer-rpc"
analyzer_lsp_rules_path = "./analysis/rulesets/default/generated"
analyzer_lsp_dep_labels_path = "./analysis/maven.default.index"

demo_mode = true
enable_reflection = false

[log_config]
log_level = "debug"
file_log_level = "debug"
log_dir = "/home/runner/work/kai/kai/kai/../../logs/"
demo_mode = true
[models]
log_dir_path = "/home/runner/work/kai/kai/kai/../../logs/"

[model_provider]
provider = "${{ matrix.models.provider }}"
[models.args]
[model_provider.args]
model_id = "${{ matrix.models.model_id }}"
EOF

if [[ -n "${{ matrix.models.max_new_tokens }}" ]]; then
cat << EOF >> ./example/config.toml
cat << EOF >> ./example/initialize.toml
parameters.max_new_tokens = ${{ matrix.models.max_new_tokens }}
EOF
fi
cat ./example/config.toml
cat ./example/initialize.toml

which python
cd example
Expand Down
1 change: 1 addition & 0 deletions .trunk/trunk.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ lint:
- kai_solution_server/samples/macos/**
- example/analysis/**
- example/default_rules/**
- kai/data/**
# This file is from https://github.com/rh-aiservices-bu/llm-on-openshift
# It is included here only for convenience
- notebooks/jms_to_smallrye_reactive/caikit_tgis_langchain.py
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ run-demo:

run-debug-driver:
python kai/reactive_codeplanner/main.py \
--kai-config example/config.toml \
--kai-config example/initialize.toml \
--source-directory example/coolstore \
--rules-directory example/analysis/rulesets/default/generated \
--analyzer-lsp-server-binary example/analysis/kai-analyzer-rpc \
Expand Down
23 changes: 23 additions & 0 deletions example/initialize.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# This file will be transformed into a json-rpc request object and sent to the
# server's `initialize` request endpoint

root_path = "./coolstore"
analyzer_lsp_java_bundle_path = "./analysis/bundle.jar"
analyzer_lsp_lsp_path = "./analysis/jdtls/bin/jdtls"
analyzer_lsp_rpc_path = "./analysis/kai-analyzer-rpc"
analyzer_lsp_rules_path = "./analysis/rulesets/default/generated"
analyzer_lsp_dep_labels_path = "./analysis/maven.default.index"

demo_mode = true
enable_reflection = false

[log_config]
log_level = "INFO"
file_log_level = "DEBUG"
log_dir_path = "./logs"

[model_provider]
provider = "ChatIBMGenAI"

[model_provider.args]
model_id = "meta-llama/llama-3-1-70b-instruct"
70 changes: 32 additions & 38 deletions example/run_demo.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import time
from io import BufferedReader, BufferedWriter
from pathlib import Path
from typing import Generator, Optional, cast
from typing import Generator, cast

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
Expand All @@ -22,8 +22,7 @@
from kai.jsonrpc.core import JsonRpcServer
from kai.jsonrpc.models import JsonRpcError, JsonRpcResponse
from kai.jsonrpc.streams import LspStyleStream
from kai.kai_config import KaiConfig
from kai.logging.logging import get_logger, init_logging_from_config
from kai.logging.logging import get_logger, init_logging_from_log_config
from kai.rpc_server.server import (
GetCodeplanAgentSolutionParams,
KaiRpcApplication,
Expand All @@ -38,14 +37,9 @@ def get_binary_path(path: str) -> Path:


SERVER_URL = "http://0.0.0.0:8080"
APP_NAME = "coolstore"
SAMPLE_APP_DIR = Path("coolstore")
ANALYSIS_BUNDLE_PATH = Path(".", "analysis", "bundle.jar")
ANALYSIS_LSP_PATH = Path(".", "analysis", "jdtls", "bin", "jdtls")
ANALYSIS_RPC_PATH = get_binary_path("./analysis/kai-analyzer-rpc")
RPC_BINARY_PATH = get_binary_path("./analysis/kai-rpc-server")
ANALYSIS_RULES_PATH = Path(".", "analysis", "rulesets", "default", "generated")
ANALYSIS_DEP_LABELS_FILE = Path(".", "analysis", "maven.default.index")
TRACING_ENABLED = "ENABLE_TRACING"

KAI_LOG = get_logger("run_demo")
Expand All @@ -57,11 +51,6 @@ def get_binary_path(path: str) -> Path:

def pre_flight_checks() -> None:
for path in [
SAMPLE_APP_DIR,
ANALYSIS_BUNDLE_PATH,
ANALYSIS_LSP_PATH,
ANALYSIS_RPC_PATH,
ANALYSIS_DEP_LABELS_FILE,
RPC_BINARY_PATH,
]:
if not path.exists():
Expand All @@ -73,32 +62,29 @@ def pre_flight_checks() -> None:

@contextlib.contextmanager
def initialize_rpc_server(
kai_config: KaiConfig,
config: KaiRpcApplicationConfig,
) -> Generator[JsonRpcServer, None, None]:

cache_dir: Optional[Path] = None
if kai_config.cache_dir is not None:
cache_dir = Path(kai_config.cache_dir)
# NOTE: This is a hack. Config should probably be globally accessible in
# this script.
global SAMPLE_APP_DIR
SAMPLE_APP_DIR = config.root_path

log = get_logger("client")
config = KaiRpcApplicationConfig(
process_id=None,
root_path=SAMPLE_APP_DIR,
kai_backend_url=SERVER_URL,
log_dir_path=Path("./logs"),
model_provider=kai_config.models,
demo_mode=True,
cache_dir=cache_dir,
analyzer_lsp_java_bundle_path=ANALYSIS_BUNDLE_PATH,
analyzer_lsp_lsp_path=ANALYSIS_LSP_PATH,
analyzer_lsp_rpc_path=ANALYSIS_RPC_PATH,
analyzer_lsp_rules_path=ANALYSIS_RULES_PATH,
analyzer_lsp_dep_labels_path=ANALYSIS_DEP_LABELS_FILE,
enable_reflection=False,
)

rpc_subprocess = subprocess.Popen( # trunk-ignore(bandit/B603)
[RPC_BINARY_PATH, "-c", "config.toml"],
[
RPC_BINARY_PATH,
"--log-level",
str(config.log_config.log_level),
"--stderr-log-level",
str(config.log_config.stderr_log_level),
"--file-log-level",
str(config.log_config.file_log_level),
"--log-dir-path",
config.log_config.log_dir_path,
"--log-file-name",
config.log_config.log_file_name,
],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
env=os.environ,
Expand Down Expand Up @@ -132,13 +118,21 @@ def initialize_rpc_server(
raise Exception(
f"Failed to initialize RPC server - {response.code} {response.message}"
)
elif response.error is not None:
if isinstance(response.error, str):
raise Exception(f"Failed to initialize RPC server - {response.error}")
else:
raise Exception(
f"Failed to initialize RPC server - {response.error.code} {response.error.message}"
)

yield rpc_server
except Exception as e:
log.error("Failed to initialize the server:", e)
finally:
# send shutdown
response = rpc_server.send_request("shutdown", params={})
log.debug(f"shutdown resposne -- {response}")
log.debug(f"shutdown response -- {response}")
log.info("Stopping RPC Server")
rpc_subprocess.wait()
log.info("Stopped RPC Server")
Expand Down Expand Up @@ -237,8 +231,8 @@ def run_demo(report: Report, server: JsonRpcServer) -> None:


def main() -> None:
kai_config = KaiConfig.model_validate_filepath("config.toml")
init_logging_from_config(kai_config)
kai_config = KaiRpcApplicationConfig.model_validate_filepath("initialize.toml")
init_logging_from_log_config(kai_config.log_config)
start = time.time()

tracer_provider: TracerProvider | None = None
Expand All @@ -254,7 +248,7 @@ def main() -> None:
report = Report.load_report_from_file(coolstore_analysis_dir)
try:
pre_flight_checks()
with initialize_rpc_server(kai_config=kai_config) as server:
with initialize_rpc_server(kai_config) as server:
run_demo(report, server)
KAI_LOG.info(
f"Total time to process '{coolstore_analysis_dir}' was {time.time()-start}s"
Expand Down
6 changes: 3 additions & 3 deletions kai/analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,12 @@
CONST_KAI_ANALYZER_LOG_FILE = "kai-analyzer-server.log"


def get_logfile_dir() -> str:
def get_logfile_dir() -> Path:
if not log:
return PATH_KAI
for h in log.handlers:
if isinstance(h, logging.FileHandler):
return os.path.dirname(h.baseFilename)
return Path(os.path.dirname(h.baseFilename))
return PATH_KAI


Expand All @@ -41,7 +41,7 @@ def __init__(
analyzer_java_bundle_path: Path,
dep_open_source_labels_path: Optional[Path],
) -> None:
"""This will start and analyzer-lsp jsonrpc server"""
"""This will start an analyzer-lsp jsonrpc server"""
# trunk-ignore-begin(bandit/B603)
args: list[str] = [
str(analyzer_lsp_server_binary),
Expand Down
36 changes: 19 additions & 17 deletions kai/constants.py
Original file line number Diff line number Diff line change
@@ -1,38 +1,40 @@
import os
import pathlib
import sys
from pathlib import Path
from typing import Any

"""
This file exists because we need to define some constants - specifically file
paths - that are used in multiple places in the codebase. There might be a more
robust solution, but for now, this should suffice
robust solution, but for now, this should suffice.

Note: We sometimes use `os.path.abspath` as opposed to `Path.resolve()` because
the former will resolve relative paths, but not symlinks. This is in line with
what Go does.
"""

PATH_KAI = os.path.dirname(os.path.abspath(__file__))
PATH_KAI = Path(os.path.dirname(os.path.abspath(__file__)))

# pyinstaller sets sys attributes to help determine when program runs in bin
if getattr(sys, "frozen", False) and hasattr(sys, "_MEIPASS"):
PATH_KAI = sys._MEIPASS
PATH_KAI = Path(sys._MEIPASS)

PATH_GIT_ROOT = os.path.join(PATH_KAI, "..")
PATH_GIT_ROOT = Path(os.path.abspath(os.path.join(PATH_KAI, "..")))

PATH_SOLUTION_SERVER_ROOT = os.path.join(PATH_GIT_ROOT, "kai_solution_server")
PATH_SOLUTION_SERVER_ROOT = PATH_GIT_ROOT / "kai_solution_server"

PATH_DATA = os.path.join(PATH_SOLUTION_SERVER_ROOT, "data")
PATH_DATA = PATH_SOLUTION_SERVER_ROOT / "data"

PATH_LLM_CACHE = os.path.join(PATH_KAI, "data", "llm_cache")
PATH_BENCHMARKS = os.path.join(PATH_DATA, "benchmarks")
PATH_MISC = os.path.join(PATH_DATA, "misc")
PATH_SQL = os.path.join(PATH_DATA, "sql")
PATH_TEMPLATES = os.path.join(PATH_DATA, "templates")
PATH_LLM_CACHE = PATH_KAI / "data" / "llm_cache"
PATH_BENCHMARKS = PATH_DATA / "benchmarks"
PATH_MISC = PATH_DATA / "misc"
PATH_SQL = PATH_DATA / "sql"
PATH_TEMPLATES = PATH_DATA / "templates"

PATH_LOCAL_REPO = os.path.join(
PATH_GIT_ROOT, "kai_solution_server/samples/sample_repos"
)
PATH_LOCAL_REPO = PATH_GIT_ROOT / "kai_solution_server" / "samples" / "sample_repos"

PATH_TESTS = os.path.join(PATH_GIT_ROOT, "tests")
PATH_TEST_DATA = pathlib.Path(os.path.join(PATH_GIT_ROOT, "tests/test_data"))
PATH_TESTS = PATH_GIT_ROOT / "tests"
PATH_TEST_DATA = PATH_GIT_ROOT / "tests" / "test_data"


def __clean_env() -> dict[str, Any]:
Expand Down
57 changes: 55 additions & 2 deletions kai/jsonrpc/util.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import logging
from typing import Any
import os
from pathlib import Path
from typing import Annotated, Any

from pydantic import AliasChoices, AliasGenerator, BaseModel, ConfigDict
from pydantic import AfterValidator, AliasChoices, AliasGenerator, BaseModel, ConfigDict
from pydantic.alias_generators import to_camel


Expand All @@ -28,7 +30,58 @@ def log_record_to_dict(record: logging.LogRecord) -> dict[str, Any]:
}


AutoAbsPath = Annotated[Path, AfterValidator(lambda x: Path(os.path.abspath(x)))]
"""
`AutoAbsPath` is a type that can be used with Pydantic models to automatically
convert an inputted relative path to an absolute path. This is different than
`.resolve()` because it will resolve relative paths, but not symlinks. For
example:

```python
class TheModel(BaseModel):
the_path: AutoAbsPath

the_model = TheModel(the_path="build/build.spec")
print(the_model.the_path) #/path/to/build/build.spec
```
"""


def auto_abs_path_exists_validator(x: Path) -> Path:
x = Path(os.path.abspath(x))
if not x.exists():
raise Exception(f"Path `{x}` does not exist!")
return x


AutoAbsPathExists = Annotated[
Path,
AfterValidator(auto_abs_path_exists_validator),
]

AutoUpperStr = Annotated[str, AfterValidator(lambda x: x.upper())]
"""
`AutoUpperStr` is a type that can be used with Pydantic models to automatically
convert an inputted string to uppercase.
"""


class CamelCaseBaseModel(BaseModel):
"""
This class will accept both camelCase and snake_case keys when creating an
instance of the model. When serializing, it will produce camelCase keys.
For example:

```python
class TheModel(CamelCaseBaseModel):
the_thing: str

a = TheModel.model_validate({"theThing": "hello"}) # Works!
b = TheModel.model_validate({"the_thing": "hello"}) # Works!
c = TheModel(the_thing="hello").model_dump() # {"theThing": "hello"}
```
"""

model_config = ConfigDict(
alias_generator=AliasGenerator(
validation_alias=lambda field_name: AliasChoices(
Expand Down
Loading
Loading