Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feat: Add logging system #17

Merged
merged 24 commits into from
Jun 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
c0aa053
feat: Add CLI `init` command
Ramimashkouk Mar 18, 2024
fb634dc
fix: Add deps to .toml file
Ramimashkouk Mar 18, 2024
2ce3497
chore: Divide processes into build and run ones
Ramimashkouk Mar 19, 2024
b05678d
feat: Add `/bot/builds` endpoint
Ramimashkouk Mar 19, 2024
f4283e2
chore: Add CORS mechanism
Ramimashkouk Mar 20, 2024
ade8497
refactor: Modify Preset body
Ramimashkouk Mar 20, 2024
2ad3d8e
feat: Return info with `/bot/builds` & `/bot/runs`
Ramimashkouk Mar 21, 2024
cb0f50a
chore: Update possible statuses of a process
Ramimashkouk Mar 21, 2024
96f0132
fix: Handle error to return `Failed` properly
Ramimashkouk Mar 21, 2024
867e574
chore: Fix "recording each log twice"
Ramimashkouk Mar 21, 2024
1127224
refactor: Unify executing subprocess.run
Ramimashkouk Mar 21, 2024
b9cc3d2
fix: Give up on bot logging to let the IO flow
Ramimashkouk Mar 22, 2024
02e510e
feat: Add DB
Ramimashkouk Mar 22, 2024
ec7b07f
chore: Define literal types to preset end_status
Ramimashkouk Mar 22, 2024
5c776b7
refactor: refactor ProcessManager
Ramimashkouk Mar 23, 2024
26bef73
feat: Update DB after each process start and stop
Ramimashkouk Mar 25, 2024
9734e36
fix: Check process statuses periodically
Ramimashkouk Mar 26, 2024
24451ed
feat: Add `/flows` endpoint
Ramimashkouk Mar 27, 2024
12bed2c
fix: Check run_id in build's runs before saving
Ramimashkouk Mar 27, 2024
5ad4854
feat: Introduce Docker implementation for backend
Ramimashkouk Mar 27, 2024
c65bc84
chore: Fix the used dff version
Ramimashkouk Mar 27, 2024
ec5f779
feat: Provide logging system
Ramimashkouk Mar 28, 2024
aeb67e9
fix: Avoid logging out of proj dir
Ramimashkouk Mar 29, 2024
9e3e04e
Merge branch 'dev' into feat/add_logging_system
Ramimashkouk Jun 4, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 31 additions & 11 deletions backend/df_designer/app/__init__.py
Original file line number Diff line number Diff line change
@@ -1,26 +1,36 @@
import aiofiles
import asyncio
from datetime import datetime
import logging
from pathlib import Path
import time
from typing import List
from omegaconf import OmegaConf

from app.core.logger_config import get_logger
from app.core.logger_config import get_logger, setup_logging
from app.core.config import settings

logger = get_logger(__name__)

def _map_to_str(params: dict):
for k, v in params.items():
if isinstance(v, datetime):
params[k] = v.strftime("%Y-%m-%dT%H:%M:%S")
elif isinstance(v, Path):
params[k] = str(v)


class Process:
def __init__(self, id_: int, preset_end_status = ""):
self.id: int = id_
self.preset_end_status: str = preset_end_status
self.status: str = "null"
self.timestamp: datetime = datetime.now()
self.log_path: str = "./logs/"
self.process: asyncio.subprocess.Process = None # pylint: disable=no-member #TODO: is naming ok?
self.log_path: Path
self.process: asyncio.subprocess.Process # pylint: disable=no-member #TODO: is naming ok?
self.logger: logging.Logger

async def start(self, cmd_to_run):
async with aiofiles.open("process_io.log", "a", encoding="UTF-8") as file: #TODO: log to files
async with aiofiles.open(self.log_path, "a", encoding="UTF-8") as file: #TODO: log to files
self.process = await asyncio.create_subprocess_exec(
*cmd_to_run.split(),
stdout=asyncio.subprocess.PIPE,
Expand All @@ -31,7 +41,7 @@ async def start(self, cmd_to_run):
def get_full_info(self) -> dict:
self.check_status()
return {
key: getattr(self, key) for key in self.__dict__ if key != "process"
key: getattr(self, key) for key in self.__dict__ if key not in ["process", "logger"]
}

def set_full_info(self, params_dict):
Expand All @@ -44,7 +54,7 @@ def update_db_info(self):
def periodically_check_status(self):
while True:
self.update_db_info() # check status and update db
logger.info("Status of process '%s': %s",self.id, self.status)
self.logger.info("Status of process '%s': %s",self.id, self.status)
if self.status in ["stopped", "completed", "failed"]:
break
time.sleep(2) #TODO: ?sleep time shouldn't be constant
Expand All @@ -70,7 +80,7 @@ def check_status(self) -> str:
elif self.process.returncode == -15:
self.status = "stopped"
else:
logger.warning(
self.logger.warning(
"Unexpected code was returned: '%s'. A non-zero return code indicates an error.",
self.process.returncode
)
Expand All @@ -81,7 +91,7 @@ async def stop(self):
if self.process is None: # Check if a process has been started
raise RuntimeError(f"Cannot stop a process '{self.id}' that has not started yet.")
try:
logger.debug("Terminating process '%s'", self.id)
self.logger.debug("Terminating process '%s'", self.id)
self.process.terminate()
await self.process.wait()
except ProcessLookupError as exception:
Expand All @@ -104,11 +114,17 @@ def __init__(self, id_: int, build_id: int = None, preset_end_status: str = ""):
super().__init__(id_, preset_end_status)
self.build_id: int = build_id

log_name: str = "_".join([str(id_), datetime.now().time().strftime("%H%M%S")])
self.log_path: Path = setup_logging("runs", log_name)
Ramimashkouk marked this conversation as resolved.
Show resolved Hide resolved
self.logger = get_logger(str(id_), self.log_path)

def update_db_info(self):
# save current run info into RUNS_PATH
runs_conf = settings.read_conf(settings.RUNS_PATH)

run_params = self.get_full_info()
run_params["timestamp"] = run_params["timestamp"].strftime("%Y-%m-%dT%H:%M:%S")
_map_to_str(run_params)

for run in runs_conf:
if run.id == run_params["id"]:
for key, value in run_params.items():
Expand Down Expand Up @@ -139,12 +155,16 @@ def __init__(self, id_: int, preset_end_status: str = ""):
super().__init__(id_, preset_end_status)
self.runs: List[int] = []

log_name: str = "_".join([str(id_), datetime.now().time().strftime("%H%M%S")])
self.log_path: Path = setup_logging("builds", log_name)
self.logger = get_logger(str(id_), self.log_path)

def update_db_info(self):
# save current build info into BUILDS_PATH
builds_conf = settings.read_conf(settings.BUILDS_PATH)

build_params = self.get_full_info()
build_params["timestamp"] = build_params["timestamp"].strftime("%Y-%m-%dT%H:%M:%S")
_map_to_str(build_params)

for build in builds_conf:
if build.id == build_params["id"]:
Expand Down
2 changes: 1 addition & 1 deletion backend/df_designer/app/api/api_v1/endpoints/bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def _check_process_status(pid: int, process_manager: ProcessManager):
@router.post("/build/start", status_code=201)
async def start_build(preset: Preset, background_tasks: BackgroundTasks, build_manager: BuildManager = Depends(deps.get_build_manager)):
await asyncio.sleep(preset.wait_time)
await build_manager.start(preset) #TODO: Think about making BuildManager and RunManager
await build_manager.start(preset)
build_id = build_manager.get_last_id()
background_tasks.add_task(build_manager.check_status, build_id)
logger.info("Build process '%s' has started", build_id)
Expand Down
4 changes: 3 additions & 1 deletion backend/df_designer/app/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,12 @@
from app.core.config import settings
from app.core.logger_config import get_logger

logger = get_logger(__name__)

cli = typer.Typer()


def _execute_command(command_to_run):
logger = get_logger(__name__)
Ramimashkouk marked this conversation as resolved.
Show resolved Hide resolved
try:
process = subprocess.run(command_to_run.split(),check=False)

Expand All @@ -35,6 +35,7 @@ def build_bot(
project_dir: str = settings.WORK_DIRECTORY,
preset: str = "success"
):
logger = get_logger(__name__)
presets_build_path = os.path.join(project_dir, "df_designer", "presets", "build.json")
with open(presets_build_path) as file:
presets_build_file = json.load(file)
Expand All @@ -54,6 +55,7 @@ def run_bot(
project_dir: str = settings.WORK_DIRECTORY,
preset: str = "success"
):
logger = get_logger(__name__)
presets_run_path = os.path.join(project_dir, "df_designer", "presets", "run.json")
with open(presets_run_path) as file:
presets_run_file = json.load(file)
Expand Down
15 changes: 7 additions & 8 deletions backend/df_designer/app/clients/process_manager.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from pathlib import Path
from typing import List, Type

from app.core.logger_config import get_logger
Expand Down Expand Up @@ -25,7 +26,7 @@ def check_status(self, pid):
def get_status(self, pid):
return self.processes[pid].check_status()

def get_full_info(self, id_, path: str, processclass: Type[Process]):
def get_full_info(self, id_, path: Path, processclass: Type[Process]):
if id_ in self.processes:
process = self.processes[id_]
return process.get_full_info()
Expand Down Expand Up @@ -55,17 +56,16 @@ async def start(self, build_id: int, preset: Preset):
self.processes[id_] = process

def get_min_info(self) -> List[dict]:
conf_path=settings.RUNS_PATH
builds_conf = settings.read_conf(conf_path)
runs_conf = settings.read_conf(settings.RUNS_PATH)
minimum_params = ["id", "build_id", "preset_end_status", "status", "timestamp"]

minimum_info = []
for build in builds_conf:
minimum_info.append({param: getattr(build, param) for param in minimum_params})
for run in runs_conf:
minimum_info.append({param: getattr(run, param) for param in minimum_params})

return minimum_info

def get_full_info(self, id_, path: str = settings.RUNS_PATH, processclass: Type[Process] = RunProcess):
def get_full_info(self, id_, path: Path = settings.RUNS_PATH, processclass: Type[Process] = RunProcess):
return super().get_full_info(id_, path, processclass)


Expand All @@ -84,7 +84,6 @@ async def start(self, preset: Preset):
process = BuildProcess(id_, preset.end_status)
await process.start(cmd_to_run)
self.processes[id_] = process
process.update_db_info()

def get_min_info(self) -> List[dict]:
conf_path=settings.BUILDS_PATH
Expand All @@ -102,5 +101,5 @@ def get_min_info(self) -> List[dict]:
minimum_info.append(info)
return minimum_info

def get_full_info(self, id_, path: str = settings.BUILDS_PATH, processclass: Type[Process] = BuildProcess):
def get_full_info(self, id_, path: Path = settings.BUILDS_PATH, processclass: Type[Process] = BuildProcess):
return super().get_full_info(id_, path, processclass)
13 changes: 6 additions & 7 deletions backend/df_designer/app/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,10 @@ class Settings(BaseSettings):
UI_PORT: int = 3000
LOG_LEVEL: str = "debug"
CONF_RELOAD: bool = True # Enable auto-reload for development mode
BUILDS_PATH: str = f"{WORK_DIRECTORY}/df_designer/builds.yaml"
RUNS_PATH: str = f"{WORK_DIRECTORY}/df_designer/runs.yaml"
DIR_LOGS: str = f"{WORK_DIRECTORY}/logs.log" #TODO: Ensure this's a good path
FRONTEND_FLOWS_PATH : str = f"{WORK_DIRECTORY}/df_designer/frontend_flows.yaml"
BUILDS_PATH: Path = Path(f"{WORK_DIRECTORY}/df_designer/builds.yaml")
RUNS_PATH: Path = Path(f"{WORK_DIRECTORY}/df_designer/runs.yaml")
DIR_LOGS: Path = Path(f"{WORK_DIRECTORY}/df_designer/logs")
FRONTEND_FLOWS_PATH : Path = Path(f"{WORK_DIRECTORY}/df_designer/frontend_flows.yaml")
# database_file = "database.sqlite"
server: uvicorn.Server = uvicorn.Server(
uvicorn.Config(APP, HOST, BACKEND_PORT, LOG_LEVEL, CONF_RELOAD, reload_dirs=WORK_DIRECTORY)
Expand All @@ -34,8 +34,7 @@ def setup_server(self, ip_address: str, port: int, conf_reload: str, project_dir
)
self.server = uvicorn.Server(config)

def read_conf(self, path: str):
path_conf = Path(path)
return OmegaConf.load(path_conf)
def read_conf(self, path: Path):
    """Load and return the OmegaConf configuration stored at *path*."""
    return OmegaConf.load(path)

settings = Settings()
28 changes: 26 additions & 2 deletions backend/df_designer/app/core/logger_config.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
from datetime import datetime
import logging
from pathlib import Path
from typing import Optional
import os

from app.core.config import settings

Expand All @@ -10,14 +14,34 @@
"debug": logging.DEBUG,
}

def setup_logging(log_type: str, log_name: str) -> Path:
    """Create (if missing) the per-day log directory and log file for a process.

    Logs are grouped as ``<DIR_LOGS>/<builds|runs>/<YYYYMMDD>/<log_name>.log``.

    Args:
        log_type: Process category; must be ``'builds'`` or ``'runs'``.
        log_name: Base name of the log file, without the ``.log`` extension.

    Returns:
        Path to the log file (created empty if it did not exist).

    Raises:
        ValueError: If ``log_type`` is not ``'builds'`` or ``'runs'``.
    """
    # Ensure log_type is either 'builds' or 'runs'
    if log_type not in ("builds", "runs"):
        raise ValueError("log_type must be 'builds' or 'runs'")

    today_date = datetime.now().strftime("%Y%m%d")
    log_directory = settings.DIR_LOGS / log_type / today_date

    # pathlib equivalent of os.makedirs(..., exist_ok=True)
    log_directory.mkdir(parents=True, exist_ok=True)

    log_file = log_directory / f"{log_name}.log"
    # Ensure the file exists without truncating an existing one.
    log_file.touch(exist_ok=True)
    return log_file

def get_logger(name, file_handler_path: Optional[Path] = None):
if file_handler_path is None:
os.makedirs(settings.DIR_LOGS, exist_ok=True)
file_handler_path = settings.DIR_LOGS/ "logs.log"
if not os.path.exists(file_handler_path):
open(file_handler_path, 'w', encoding="UTF-8").close()
Ramimashkouk marked this conversation as resolved.
Show resolved Hide resolved

def get_logger(name):
logger = logging.getLogger(name)
logger.propagate = False
logger.setLevel(LOG_LEVELS[settings.LOG_LEVEL])

c_handler = logging.StreamHandler()
f_handler = logging.FileHandler(settings.DIR_LOGS)
f_handler = logging.FileHandler(file_handler_path)
c_handler.setLevel(LOG_LEVELS[settings.LOG_LEVEL])
f_handler.setLevel(LOG_LEVELS[settings.LOG_LEVEL])

Expand Down
6 changes: 3 additions & 3 deletions backend/df_designer/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading