from model_handler import ModelHandler
from config import LING_FLASH_2_0

def fetch_flow_suggestion_agent(editor_content: str, style: str = "", kb=None, short_outline=None, long_outline=None):
    """
    Agent for fetching a short, real-time continuation.
    This agent calls a real LLM.
    """

    if not editor_content or len(editor_content.strip()) < 4:
        return "(请输入更多内容以获取建议...)"

    try:
        model_handler = ModelHandler()

        # For a simple continuation, we can use a concise system prompt.
        system_prompt = f"""你是一个写作助手,根据用户输入的内容,紧接着写一句 **简短、流畅** 的续写。
- 不要重复用户已输入的内容,直接开始写你续写的部分。
- 遵循 **整体章程** 中的风格和指导原则。

整体章程:
{style}"""


        # We use editor_content as the user prompt.
        # The context is orchestrated around the current cursor position, which
        # determines where the continuation is inserted:
        # ---
        # <之前的内容>...</之前的内容>{续写这里}
        # <之后的内容>...</之后的内容>
        # The model writes the {续写这里} (continue-here) part.
        # ---
        # (A hedged sketch of the two-sided arrangement follows the assignment below.)
        user_prompt = f"""===之前的内容===\n{editor_content[-80:]}"""
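        # Hedged sketch only: if a hypothetical `following_content` argument were
        # added to this function, the two-sided arrangement described above could
        # be assembled like this (an assumption, not the current signature):
        #
        # user_prompt = (
        #     f"===之前的内容===\n{editor_content[-80:]}\n"
        #     f"===之后的内容===\n{following_content[:80]}"
        # )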
        # generate_code streams the generated content directly, which is all we
        # need here. model_choice normally takes a display name, but the
        # LING_FLASH_2_0 constant can be passed through as-is.
        response_generator = model_handler.generate_code(
            system_prompt=system_prompt,
            user_prompt=user_prompt,
            model_choice=LING_FLASH_2_0
        )

        # Assemble the streamed chunks into a single string.
        full_response = "".join(response_generator)
        return full_response.strip()

    except Exception as e:
        print(f"[Agent] Error fetching flow suggestion: {e}")
        return f"(获取建议时出错: {e})"


def accept_flow_suggestion_agent(current_text: str, suggestion: str):
    """
    Agent for accepting a flow suggestion: appends the suggestion to the
    current text, while ignoring placeholder and error messages.
    """

    # Skip placeholders ("等待输入", "请输入更多内容") and error messages ("出错"),
    # such as the fallback strings returned by fetch_flow_suggestion_agent, so
    # they are never merged into the draft.
    if not suggestion or "等待输入" in suggestion or "请输入" in suggestion or "出错" in suggestion:
        return current_text
    return current_text + suggestion