Observability
Funkmyster committed Aug 29, 2023
1 parent 8b3708d commit 6a971c1
Showing 12 changed files with 610 additions and 84 deletions.
6 changes: 3 additions & 3 deletions langcontroller/processors/__init__.py
@@ -1,4 +1,4 @@
"""LangController Controllers."""
from langcontroller.processors.base import *
from langcontroller.processors.llama import *
from langcontroller.processors.marvin import *
from langcontroller.processors.base import StructuredLLMOutputBase
from langcontroller.processors.llama import LlamaStructuredLLMOutput
from langcontroller.processors.marvin import MarvinStructuredLLMOutput
92 changes: 89 additions & 3 deletions langcontroller/processors/marvin.py
@@ -1,17 +1,46 @@
 # -*- coding: utf-8 -*-
 """Marvin Controller Module.
-This module contains the MarvinController class, which is a subclass of
-the LangController class.
+This module contains the MarvinStructuredLLMOutput class, which is a
+subclass of the StructuredLLMOutputBase class.
 """
 from dataclasses import dataclass
+from typing import Callable

 import pydantic
+from opentelemetry import metrics
+from opentelemetry.sdk.metrics import MeterProvider
+from opentelemetry.sdk.metrics.export import InMemoryMetricReader
+from loguru import logger

 from langcontroller.processors.base import (
     StructuredLLMOutputBase,
     OutputModel,
 )

+metric_reader = InMemoryMetricReader()
+provider = MeterProvider(metric_readers=[metric_reader])
+metrics.set_meter_provider(provider)
+meter = metrics.get_meter(__name__)
+
+prompt_render_counter = meter.create_counter(
+    "prompt.render.counter",
+    unit="1",
+    description="Counts the number of times a prompt is rendered",
+)
+
+structured_llm_output_counter = meter.create_counter(
+    "structured.llm.output.counter",
+    unit="1",
+    description="Counts the number of times a structured llm output completes successfully",
+)
+
+structured_llm_model_counter = meter.create_counter(
+    "structured.llm.model.counter",
+    unit="1",
+    description="Counts the number of times a structured llm model is executed",
+)
+

 @dataclass
 class MarvinStructuredLLMOutput(StructuredLLMOutputBase):
@@ -26,8 +55,65 @@ def apply(self, **kwargs) -> OutputModel:
         Returns:
             `pydantic.main.ModelMetaclass`: The output model
         """
+
+        def processor_filter() -> Callable[[], bool]:
+            """Processor Filter.
+            Returns:
+                Callable[[], bool]: The filter function
+            """
+
+            def is_processor(record) -> bool:
+                """Is Processor.
+                Args:
+                    record (dict): The loguru record
+                Returns:
+                    bool: True if the record is a processor, False otherwise
+                """
+                if all(
+                    [
+                        record.get("extra", False),
+                        record["extra"].get("model", False),
+                        record["extra"].get("prompt_template", False),
+                    ]
+                ):
+                    return True
+
+            return is_processor
+
+        logger.add(
+            f"logs/processors/{self.__class__.__name__}.log",
+            rotation="10 MB",
+            filter=processor_filter(),
+            format="{time} | {level} | app.models.{extra[model]} | {extra[prompt_template]}.j2 | {message}",
+        )
+        context_logger = logger.bind(
+            model=self.output_model.__name__, prompt_template=self.prompt_template
+        )
+        context_logger.debug("Applying MarvinStructuredLLMOutput...")
+
+        context_logger.debug("Rendering prompt_template...")
+        prompt = self.get_rendered_prompt(
+            prompt_template=self.prompt_template, **kwargs
+        )
+        context_logger.debug("Prompt rendered")
+        prompt_render_counter.add(1, {"prompt.template": self.prompt_template})
+
+        context_logger.debug("Submitting prompt to model for LLM processing...")
+        output: OutputModel = self.output_model(prompt)
+        context_logger.debug("Processing complete")
+        for attribute in output.__fields__:
+            context_logger.debug(
+                f"{output.__class__.__name__}.{attribute}: {getattr(output, attribute)}"
+            )
+
-        return self.output_model(prompt)
+        structured_llm_model_counter.add(
+            1, {"structured.llm.model": self.output_model.__name__}
+        )
+
+        structured_llm_output_counter.add(
+            1, {"structured.llm.output": self.__class__.__name__}
+        )
+
+        return output
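
The counters above are registered against an InMemoryMetricReader, so this change keeps measurements in process rather than exporting them anywhere. A minimal sketch (not part of the commit) of how the recorded values could be read back, for example in a test, assuming the module-level metric_reader defined in langcontroller/processors/marvin.py:

# Sketch only: pull the current counter values out of the in-memory reader.
# Assumes the module-level metric_reader from the diff above is importable.
from langcontroller.processors.marvin import metric_reader


def dump_counter_values() -> dict:
    """Return {(metric name, attributes): value} for every recorded data point."""
    values = {}
    data = metric_reader.get_metrics_data()  # forces a collection cycle
    if data is None:
        return values
    for resource_metrics in data.resource_metrics:
        for scope_metrics in resource_metrics.scope_metrics:
            for metric in scope_metrics.metrics:
                for point in metric.data.data_points:
                    key = (metric.name, tuple(sorted(point.attributes.items())))
                    values[key] = point.value
    return values

After one successful apply() call, prompt.render.counter and both structured-output counters would each report a value of 1 for the corresponding template and model attributes.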
@@ -14,14 +14,14 @@ def create_{{ target_action_underscore_name }}() -> {{ target_action_python_name
     Returns:
         `app.models.{{ target_action_python_name }}`: The {{ target_action_human_name }}
     """
-    print("Creating {{ target_action_human_name }}...")
+    logger.info("Creating {{ target_action_human_name }}...")

     model: {{ target_action_python_name }} = {{ controller_type }}StructuredLLMOutput(
         prompt_template="{{ prompt_name }}",
         output_model={{ target_action_python_name }},
     ).apply()

-    print(model.json(indent=2))
+    logger.info("Created {{ target_action_human_name }}.")

     return model

@@ -20,14 +20,14 @@ def create_{{ target_action_underscore_name }}(
     Returns:
         `app.models.{{ target_action_python_name }}`: The {{ target_action_human_name }}
     """
-    print("Creating {{ target_action_human_name }}...")
+    logger.info("Creating {{ target_action_human_name }}...")

     model: {{ target_action_python_name }} = {{ controller_type }}StructuredLLMOutput(
         prompt_template="{{ prompt_name }}",
         output_model={{ target_action_python_name }},
     ).apply({{ source_action_underscore_name }}={{ source_action_underscore_name }})

-    print(model.json(indent=2))
+    logger.info("Created {{ target_action_human_name }}.")

     return model

4 changes: 3 additions & 1 deletion langcontroller/templates/features/create_features_file.py.j2
@@ -27,10 +27,12 @@ Patterns:
 """
 import typer
 from fastapi import FastAPI
-from rich import print
+from loguru import logger
 from langcontroller.processors import *
 from app.models import *

+logger.add("logs/app/features.log", rotation="10 MB")
+
 api_app = FastAPI()

 cli_app = typer.Typer(rich_markup_mode="rich")
@@ -1,8 +1,7 @@
 @asset
 def {{ target_action_underscore_name }}(context: AssetExecutionContext) -> str:
     """Creates a {{ target_action_human_name }}."""
-    logger = get_dagster_logger()
-    logger.info(f"Materializing {{ target_action_human_name }}...")
+    context.log.info("Materializing {{ target_action_human_name }}...")

     my_{{ target_action_underscore_name }} = create_{{ target_action_underscore_name }}()

@@ -3,9 +3,8 @@ def {{ target_action_underscore_name }}(
     context: AssetExecutionContext, {{ source_action_underscore_name }}: str
 ) -> str:
     """Creates a {{ target_action_human_name }}."""
-    logger = get_dagster_logger()
-    logger.info(f"Materializing {{ target_action_human_name }}...")
-    logger.info(f"Sourcing {{ source_action_human_name }}: {% raw %}{{% endraw %}{{ source_action_underscore_name }}{% raw %}}{% endraw %}")
+    context.log.info("Materializing {{ target_action_human_name }}...")
+    context.log.info(f"Sourcing {{ source_action_human_name }}: {% raw %}{{% endraw %}{{ source_action_underscore_name }}{% raw %}}{% endraw %}")

     my_{{ target_action_underscore_name }} = create_{{ target_action_underscore_name }}(
         {{ source_action_underscore_name }}={{ source_action_underscore_name }}
@@ -6,7 +6,6 @@ This module defines the following Dagster items:
 """
 from dagster import (
     asset,
-    get_dagster_logger,
     AssetExecutionContext,
 )
 from app.features import *
13 changes: 12 additions & 1 deletion langcontroller/templates/top_level/create_pyproject_file.toml.j2
@@ -8,19 +8,30 @@ readme = "README.md"
 [tool.poetry.dependencies]
 python = "^3.10"
 jinja2 = "^3.1.2"
-typer = { extras = ["all"], version = "^0.9.0" }
+loguru = "^0.7.0"
+opentelemetry-api = "^1.19.0"
+opentelemetry-distro = "^0.40b0"
+opentelemetry-exporter-otlp = "^1.19.0"
+opentelemetry-instrumentation-fastapi = "^0.40b0"
+opentelemetry-sdk = "^1.19.0"
 python-slugify = "^8.0.1"
+typer = { extras = ["all"], version = "^0.9.0" }

 [tool.poetry.group.marvin.dependencies]
 marvin = "^1.3.0"

 [tool.poetry.group.llama.dependencies]
 llama-index = "^0.8.12"

 [tool.poetry.group.dev.dependencies]
 pytest = "^7.4.0"
 bandit = { extras = ["toml"], version = "^1.7.5" }
 black = "^23.7.0"
 docformatter = "^1.7.5"
 hypothesis = "^6.82.6"
+loguru = "^0.7.0"
+opentelemetry-distro = "^0.40b0"
+opentelemetry-exporter-otlp = "^1.19.0"
 pdoc = "^14.0.0"
 pydocstyle = { extras = ["toml"], version = "^6.3.0" }
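
The runtime dependencies above now pull in the OTLP exporter and opentelemetry-distro alongside the SDK, which suggests the in-memory reader in marvin.py could later be pointed at a real collector. A hedged sketch of that alternative wiring (nothing in this commit configures it; assumes an OTLP collector reachable on the default gRPC endpoint):

# Sketch only: export the same counters over OTLP instead of keeping them in memory.
from opentelemetry import metrics
from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader

# Defaults to localhost:4317; override with OTEL_EXPORTER_OTLP_ENDPOINT.
reader = PeriodicExportingMetricReader(OTLPMetricExporter())
metrics.set_meter_provider(MeterProvider(metric_readers=[reader]))
meter = metrics.get_meter(__name__)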
