api -> endpoints, control -> api (#593)
steersbob authored Jan 8, 2024
1 parent 3ba2a55 commit 440c3ef
Showing 40 changed files with 168 additions and 267 deletions.
3 changes: 0 additions & 3 deletions brewblox_devcon_spark/api/__init__.py

This file was deleted.

20 changes: 7 additions & 13 deletions brewblox_devcon_spark/app_factory.py
@@ -7,11 +7,9 @@
 from fastapi.exceptions import RequestValidationError, ResponseValidationError
 from fastapi.responses import JSONResponse

-from . import (block_backup, broadcast, codec, command, connection, control,
-               datastore_blocks, datastore_settings, mqtt, state_machine,
-               synchronization, time_sync, utils)
-from .api import (backup_api, blocks_api, blocks_mqtt_api, debug_api,
-                  settings_api, sim_api, system_api)
+from . import (block_backup, broadcast, codec, command, connection,
+               datastore_blocks, datastore_settings, endpoints, mqtt,
+               spark_api, state_machine, synchronization, time_sync, utils)
 from .models import ErrorResponse

 LOGGER = logging.getLogger(__name__)
@@ -119,9 +117,9 @@ def create_app() -> FastAPI:
     codec.setup()
     connection.setup()
     command.setup()
-    control.setup()
+    spark_api.setup()
     block_backup.setup()
-    blocks_mqtt_api.setup()
+    endpoints.setup()

     # Create app
     # OpenApi endpoints are set to /api/doc for backwards compatibility
@@ -135,11 +133,7 @@ def create_app() -> FastAPI:
     add_exception_handlers(app)

     # Include all endpoints declared by modules
-    app.include_router(blocks_api.router, prefix=prefix)
-    app.include_router(system_api.router, prefix=prefix)
-    app.include_router(settings_api.router, prefix=prefix)
-    app.include_router(sim_api.router, prefix=prefix)
-    app.include_router(backup_api.router, prefix=prefix)
-    app.include_router(debug_api.router, prefix=prefix)
+    for router in endpoints.routers:
+        app.include_router(router, prefix=prefix)

     return app
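The new registration loop depends only on `endpoints` exposing a plain list of routers (see the new `endpoints/__init__.py` below). A minimal self-contained sketch of the same pattern, using hypothetical router and prefix names, looks like this:

from fastapi import APIRouter, FastAPI

# Hypothetical stand-ins for http_blocks.router, http_system.router, etc.
blocks_router = APIRouter(prefix='/blocks', tags=['Blocks'])
system_router = APIRouter(prefix='/system', tags=['System'])


@blocks_router.get('/ping')
async def blocks_ping() -> dict:
    return {'ok': True}


# The aggregation module exports a single list of routers
routers: list[APIRouter] = [blocks_router, system_router]

app = FastAPI()
for router in routers:
    # One loop replaces the six near-identical include_router() calls
    app.include_router(router, prefix='/example-service')

Adding a new REST module now only means appending to `routers`; `app_factory` itself no longer changes.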
12 changes: 6 additions & 6 deletions brewblox_devcon_spark/block_backup.py
@@ -8,7 +8,7 @@
 from contextvars import ContextVar
 from datetime import datetime, timedelta

-from . import control, exceptions, state_machine, utils
+from . import exceptions, spark_api, state_machine, utils
 from .models import Backup, BackupApplyResult, BackupIdentity

 LOGGER = logging.getLogger(__name__)
@@ -20,16 +20,16 @@ class BackupStorage:
     def __init__(self):
         self.config = utils.get_config()
         self.state = state_machine.CV.get()
-        self.ctrl = control.CV.get()
+        self.api = spark_api.CV.get()

         self.dir = self.config.backup_root_dir / self.config.name
         self.dir.mkdir(mode=0o777, parents=True, exist_ok=True)

     async def save_portable(self) -> Backup:
-        return await self.ctrl.make_backup()
+        return await self.api.make_backup()

     async def load_portable(self, data: Backup) -> BackupApplyResult:
-        return await self.ctrl.apply_backup(data)
+        return await self.api.apply_backup(data)

     async def all(self) -> list[BackupIdentity]:
         return [BackupIdentity(name=f.stem)
@@ -55,14 +55,14 @@ async def write(self, data: Backup) -> Backup:
         return data

     async def save(self, ident: BackupIdentity):
-        data = await self.ctrl.make_backup()
+        data = await self.api.make_backup()
         data.name = ident.name
         await self.write(data)
         return data

     async def load(self, ident: BackupIdentity) -> BackupApplyResult:
         data = await self.read(ident)
-        return await self.ctrl.apply_backup(data)
+        return await self.api.apply_backup(data)

     async def run(self):
         if self.state.is_synchronized():
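The `self.api = spark_api.CV.get()` line follows the `contextvars` pattern already visible in this file's imports: each service module exposes a module-level `ContextVar` that `setup()` fills once during app startup. A minimal sketch of that pattern, with a hypothetical `SparkApi` class standing in for the real implementation:

from contextvars import ContextVar


class SparkApi:
    # Stand-in for the real spark_api implementation
    async def make_backup(self) -> dict:
        return {'blocks': []}


# Module-level handle; consumers call spark_api.CV.get()
CV: ContextVar[SparkApi] = ContextVar('spark_api.SparkApi')


def setup():
    # Called once from create_app(); afterwards any code running in the
    # same context can fetch the shared instance without import cycles
    # between modules.
    CV.set(SparkApi())

This is why the rename is almost entirely mechanical: only the module name and the attribute names change, not the lookup mechanism.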
6 changes: 3 additions & 3 deletions brewblox_devcon_spark/broadcast.py
@@ -8,7 +8,7 @@
 from contextlib import asynccontextmanager
 from datetime import timedelta

-from . import const, control, mqtt, state_machine, utils
+from . import const, mqtt, spark_api, state_machine, utils
 from .block_analysis import calculate_claims, calculate_relations
 from .models import HistoryEvent, ServiceStateEvent, ServiceStateEventData

@@ -19,7 +19,7 @@ class Broadcaster:

     def __init__(self):
         self.config = utils.get_config()
-        self.controller = control.CV.get()
+        self.api = spark_api.CV.get()

         self.state_topic = f'{self.config.state_topic}/{self.config.name}'
         self.history_topic = f'{self.config.history_topic}/{self.config.name}'
@@ -31,7 +31,7 @@ async def run(self):

         try:
             if state.is_synchronized():
-                blocks, logged_blocks = await self.controller.read_all_broadcast_blocks()
+                blocks, logged_blocks = await self.api.read_all_broadcast_blocks()

                 # Convert list to key/value format suitable for history
                 history_data = {
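The hunk above is truncated at the dict comprehension that converts the block list into key/value history data. The exact expression is cut off here; a plausible sketch of such a conversion, assuming each block carries `id` and `data` fields:

# Hypothetical logged-block payloads, keyed by block id for history
logged_blocks = [
    {'id': 'sensor-1', 'data': {'value[degC]': 20.5}},
    {'id': 'pid-1', 'data': {'outputValue': 35.0}},
]

history_data = {
    block['id']: block['data']
    for block in logged_blocks
}
# -> {'sensor-1': {'value[degC]': 20.5}, 'pid-1': {'outputValue': 35.0}}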
21 changes: 21 additions & 0 deletions brewblox_devcon_spark/endpoints/__init__.py
@@ -0,0 +1,21 @@
+"""
+Namespace for all REST API modules.
+"""
+
+from fastapi import APIRouter
+
+from . import (http_backup, http_blocks, http_debug, http_settings, http_sim,
+               http_system, mqtt_blocks)
+
+routers: list[APIRouter] = [
+    http_backup.router,
+    http_blocks.router,
+    http_debug.router,
+    http_settings.router,
+    http_sim.router,
+    http_system.router,
+]
+
+
+def setup():
+    mqtt_blocks.setup()
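Collecting the HTTP routers in one list keeps `app_factory` agnostic of the individual endpoint modules. Note the asymmetry: the HTTP routers are consumed declaratively through `routers`, while `mqtt_blocks` still needs an explicit `setup()` call, presumably because it registers MQTT subscription handlers rather than HTTP routes.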
brewblox_devcon_spark/api/backup_api.py → brewblox_devcon_spark/endpoints/http_backup.py

@@ -1,8 +1,7 @@
 """
-REST API for handling backups of controller data
+REST endpoints for handling backups of controller data
 """

-
 import logging

 from fastapi import APIRouter
brewblox_devcon_spark/api/blocks_api.py → brewblox_devcon_spark/endpoints/http_blocks.py

@@ -1,12 +1,12 @@
 """
-REST API for Spark blocks
+REST endpoints for Spark blocks
 """

 import logging

 from fastapi import APIRouter

-from .. import control, mqtt, utils
+from .. import mqtt, spark_api, utils
 from ..models import Block, BlockIdentity, BlockNameChange

 LOGGER = logging.getLogger(__name__)
@@ -37,7 +37,7 @@ async def blocks_create(args: Block) -> Block:
     """
     Create new block.
     """
-    block = await control.CV.get().create_block(args)
+    block = await spark_api.CV.get().create_block(args)
     publish(changed=[block])
     return block

@@ -47,7 +47,7 @@ async def blocks_read(args: BlockIdentity) -> Block:
     """
     Read existing block.
     """
-    block = await control.CV.get().read_block(args)
+    block = await spark_api.CV.get().read_block(args)
     return block


@@ -56,7 +56,7 @@ async def blocks_read_logged(args: BlockIdentity) -> Block:
     """
     Read existing block. Data only includes logged fields.
     """
-    block = await control.CV.get().read_logged_block(args)
+    block = await spark_api.CV.get().read_logged_block(args)
     return block


@@ -65,7 +65,7 @@ async def blocks_read_stored(args: BlockIdentity) -> Block:
     """
     Read existing block. Data only includes stored fields.
     """
-    block = await control.CV.get().read_stored_block(args)
+    block = await spark_api.CV.get().read_stored_block(args)
     return block


@@ -74,7 +74,7 @@ async def blocks_write(args: Block) -> Block:
     """
     Write existing block. This will replace all fields.
     """
-    block = await control.CV.get().write_block(args)
+    block = await spark_api.CV.get().write_block(args)
     publish(changed=[block])
     return block

@@ -84,7 +84,7 @@ async def blocks_patch(args: Block) -> Block:
     """
     Patch existing block. This will only replace provided fields.
     """
-    block = await control.CV.get().patch_block(args)
+    block = await spark_api.CV.get().patch_block(args)
     publish(changed=[block])
     return block

@@ -94,7 +94,7 @@ async def blocks_delete(args: BlockIdentity) -> BlockIdentity:
     """
     Delete existing user block.
     """
-    ident = await control.CV.get().delete_block(args)
+    ident = await spark_api.CV.get().delete_block(args)
    publish(deleted=[ident])
    return ident

@@ -104,8 +104,8 @@ async def blocks_batch_create(args: list[Block]) -> list[Block]:
     """
     Create multiple new blocks.
     """
-    ctrl = control.CV.get()
-    blocks = [await ctrl.create_block(block)
+    api = spark_api.CV.get()
+    blocks = [await api.create_block(block)
               for block in args]
     publish(changed=blocks)
     return blocks
@@ -116,8 +116,8 @@ async def blocks_batch_read(args: list[BlockIdentity]) -> list[Block]:
     """
     Read multiple existing blocks.
     """
-    ctrl = control.CV.get()
-    blocks = [await ctrl.read_block(ident)
+    api = spark_api.CV.get()
+    blocks = [await api.read_block(ident)
               for ident in args]
     return blocks

@@ -127,8 +127,8 @@ async def blocks_batch_write(args: list[Block]) -> list[Block]:
     """
     Write multiple existing blocks. This will replace all fields.
     """
-    ctrl = control.CV.get()
-    blocks = [await ctrl.write_block(block)
+    api = spark_api.CV.get()
+    blocks = [await api.write_block(block)
               for block in args]
     publish(changed=blocks)
     return blocks
@@ -139,8 +139,8 @@ async def blocks_batch_patch(args: list[Block]) -> list[Block]:
     """
     Write multiple existing blocks. This will only replace provided fields.
     """
-    ctrl = control.CV.get()
-    blocks = [await ctrl.patch_block(block)
+    api = spark_api.CV.get()
+    blocks = [await api.patch_block(block)
               for block in args]
     publish(changed=blocks)
     return blocks
@@ -151,8 +151,8 @@ async def blocks_batch_delete(args: list[BlockIdentity]) -> list[BlockIdentity]:
     """
     Delete multiple existing user blocks.
     """
-    ctrl = control.CV.get()
-    idents = [await ctrl.delete_block(ident)
+    api = spark_api.CV.get()
+    idents = [await api.delete_block(ident)
               for ident in args]
     publish(deleted=idents)
     return idents
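All of the batch handlers above share one shape: fetch the API handle once, then run the per-block operations in an awaited list comprehension. The calls therefore execute sequentially, preserve request order, and fail fast before any `publish()` notification is sent. A self-contained sketch of that behaviour, with a hypothetical `write_block` coroutine standing in for the real controller call:

import asyncio


async def write_block(block: str) -> str:
    # Stand-in for the real controller round-trip
    await asyncio.sleep(0)
    if not block:
        raise ValueError('invalid block')
    return block.upper()


async def blocks_batch_write(args: list[str]) -> list[str]:
    # Sequential by design: one command at a time, in request order
    blocks = [await write_block(block)
              for block in args]
    return blocks


print(asyncio.run(blocks_batch_write(['sensor-1', 'pid-1'])))
# -> ['SENSOR-1', 'PID-1']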
@@ -163,7 +163,7 @@ async def blocks_all_read() -> list[Block]:
     """
     Read all existing blocks.
     """
-    blocks = await control.CV.get().read_all_blocks()
+    blocks = await spark_api.CV.get().read_all_blocks()
     return blocks


@@ -172,7 +172,7 @@ async def blocks_all_read_logged() -> list[Block]:
     """
     Read all existing blocks. Only includes logged fields.
     """
-    blocks = await control.CV.get().read_all_logged_blocks()
+    blocks = await spark_api.CV.get().read_all_logged_blocks()
     return blocks


@@ -181,7 +181,7 @@ async def blocks_all_read_stored() -> list[Block]:
     """
     Read all existing blocks. Only includes stored fields.
     """
-    blocks = await control.CV.get().read_all_stored_blocks()
+    blocks = await spark_api.CV.get().read_all_stored_blocks()
     return blocks


@@ -190,7 +190,7 @@ async def blocks_all_delete() -> list[BlockIdentity]:
     """
     Delete all user blocks.
     """
-    idents = await control.CV.get().clear_blocks()
+    idents = await spark_api.CV.get().clear_blocks()
     publish(deleted=idents)
     return idents

@@ -200,7 +200,7 @@ async def blocks_cleanup() -> list[BlockIdentity]:
     """
     Clean unused block IDs.
     """
-    idents = await control.CV.get().remove_unused_ids()
+    idents = await spark_api.CV.get().remove_unused_ids()
     return idents


@@ -210,9 +210,9 @@ async def blocks_rename(args: BlockNameChange) -> BlockIdentity:
     Rename existing block.
     """
     config = utils.get_config()
-    ctrl = control.CV.get()
-    ident = await ctrl.rename_block(args)
-    block = await ctrl.read_block(ident)
+    api = spark_api.CV.get()
+    ident = await api.rename_block(args)
+    block = await api.read_block(ident)
     old_ident = BlockIdentity(id=args.existing,
                               serviceId=config.name)
     publish(changed=[block], deleted=[old_ident])
@@ -224,7 +224,7 @@ async def blocks_discover() -> list[Block]:
     """
     Discover new automatically created blocks.
     """
-    blocks = await control.CV.get().discover_blocks()
+    blocks = await spark_api.CV.get().discover_blocks()
     publish(changed=blocks)
     return blocks

@@ -236,5 +236,5 @@ async def blocks_validate(args: Block) -> Block:
     This checks whether the block can be serialized.
     It will not be sent to the controller.
     """
-    block = await control.CV.get().validate(args)
+    block = await spark_api.CV.get().validate(args)
     return block
brewblox_devcon_spark/api/debug_api.py → brewblox_devcon_spark/endpoints/http_debug.py

@@ -1,5 +1,5 @@
 """
-REST API for system debugging
+REST endpoints for system debugging
 """

 import logging
brewblox_devcon_spark/api/settings_api.py → brewblox_devcon_spark/endpoints/http_settings.py

@@ -1,8 +1,7 @@
 """
-REST API for persistent settings
+REST endpoints for persistent settings
 """

-
 import logging

 from fastapi import APIRouter
File renamed without changes.