Explorar o código

add templates to prompt command

Bruce MacDonald hai 1 ano
pai
achega
d34985b9df
Modificáronse 3 ficheiros con 60 adicións e 0 borrados
  1. 1 0
      .gitignore
  2. 2 0
      proto.py
  3. 57 0
      template.py

+ 1 - 0
.gitignore

@@ -5,3 +5,4 @@
 *.spec
 build
 dist
+__pycache__

+ 2 - 0
proto.py

@@ -5,6 +5,7 @@ import click
 from llama_cpp import Llama
 from flask import Flask, Response, stream_with_context, request
 from flask_cors import CORS
+from template import template
 
 app = Flask(__name__)
 CORS(app)  # enable CORS for all routes
@@ -124,6 +125,7 @@ def generate(model, prompt):
     if prompt == "":
         prompt = input("Prompt: ")
     output = ""
+    prompt = template(model, prompt)
     for generated in query(model, prompt):
         generated_json = json.loads(generated)
         text = generated_json["choices"][0]["text"]

+ 57 - 0
template.py

@@ -0,0 +1,57 @@
from difflib import SequenceMatcher

# Known prompt formats keyed by model family name. Each value is a
# str.format template with a single "{prompt}" placeholder.
model_prompts = {
    "alpaca": """Below is an instruction that describes a task. Write a response that appropriately completes the request.

### Instruction:
{prompt}

### Response:

""",
    "oasst": "<|prompter|>{prompt}<|endoftext|><|assistant|>",
    "vicuna": """A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.

USER: {prompt}
ASSISTANT:""",
    "hermes": """### Instruction:
{prompt}

### Response:
""",
    "gpt4": """### Instruction:
{prompt}

### Response:
""",
    "qlora": """### Human: {prompt}
### Assistant:""",
    # NOTE(review): the original ended with the literal text "(include
    # newline)", which looks like an authoring note accidentally left in
    # the template; replaced with the trailing newline it described.
    "tulu": """<|user|>
{prompt}
<|assistant|>
""",
    "wizardlm-7b": """{prompt}

### Response:""",
    "wizardlm-13b": """{prompt}

### Response:""",
    "wizardlm-30b": """{prompt}

### Response:""",
}


def template(model, prompt):
    """Wrap *prompt* in the prompt format of the best-matching model.

    The model name is lowercased and fuzzy-matched (difflib.SequenceMatcher
    ratio) against the keys of ``model_prompts``; the highest-scoring
    template is filled in with ``prompt``.

    Args:
        model: Model name or path; matched case-insensitively.
        prompt: Raw user prompt to embed in the template.

    Returns:
        The formatted prompt, or ``prompt`` unchanged when no key matches
        at all (previously this crashed: ``.get("")`` returned ``None``
        and ``None.format(...)`` raised ``AttributeError``).
    """
    max_ratio = 0.0
    closest_key = ""
    model_name = model.lower()
    # Find the specialized prompt with the closest name match.
    for key in model_prompts:
        ratio = SequenceMatcher(None, model_name, key).ratio()
        if ratio > max_ratio:
            max_ratio = ratio
            closest_key = key
    # ratio is 0.0 when the model name shares no characters with any key;
    # fall back to the untemplated prompt instead of dereferencing None.
    if not closest_key:
        return prompt
    return model_prompts[closest_key].format(prompt=prompt)