@@ -1,12 +1,19 @@
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+from pathlib import Path
+
+import click
+from pydantic import BaseModel, Field
+from rich.panel import Panel
+
 from aicodebot.coder import Coder
 from aicodebot.helpers import exec_and_get_output, logger
 from aicodebot.lm import LanguageModelManager
 from aicodebot.output import OurMarkdown, get_console
 from aicodebot.prompts import get_prompt
-from pathlib import Path
-from pydantic import BaseModel, Field
-from rich.panel import Panel
-import click, os, shutil, subprocess, sys, tempfile
 
 
 class CommitMessage(BaseModel):
@@ -97,7 +104,9 @@ def commit(response_token_size, yes, skip_pre_commit, files): # noqa: PLR0915
     llm = lmm.model_factory(response_token_size=response_token_size)
     # Using Langchain Expression Language (LCEL) for structured output. So chic! 😉
     chain = prompt | llm.with_structured_output(CommitMessage)
+    logger.debug(f"Chain input: {{'diff_context': {diff_context}, 'languages': {languages}}}")
     response = chain.invoke({"diff_context": diff_context, "languages": languages})
+    logger.debug(f"Chain response: {response}")
 
     # Handle both object and dict responses,
     # The structured output sometimes returns a dict and sometimes returns an object?!
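The two `logger.debug` calls added here log the chain's input and output around `chain.invoke`, which helps pin down why `with_structured_output` sometimes hands back a `CommitMessage` instance and sometimes a plain dict. Below is a minimal sketch of how that normalization can be handled; the `git_message` field and the `coerce_commit_message` helper are illustrative stand-ins, not the repository's actual code.

```python
from pydantic import BaseModel


class CommitMessage(BaseModel):
    # Stand-in for the real model defined earlier in this file; its actual
    # fields are not shown in this hunk, so this single field is illustrative.
    git_message: str


def coerce_commit_message(response) -> CommitMessage:
    """Accept either a CommitMessage instance or a plain dict and return a model instance."""
    if isinstance(response, CommitMessage):
        return response
    if isinstance(response, dict):
        # Re-validate the dict through pydantic so missing keys or wrong types fail loudly
        return CommitMessage(**response)
    raise TypeError(f"Unexpected structured output type: {type(response)!r}")


# Both shapes mentioned in the comment normalize to the same model:
print(coerce_commit_message(CommitMessage(git_message="Fix typo in README")))
print(coerce_commit_message({"git_message": "Fix typo in README"}))
```

Coercing dicts back through the pydantic constructor keeps the rest of `commit()` working with a single type instead of branching on `isinstance` at every use site.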