from dora import DoraStatus
import pylcs
import textwrap
import os
import pyarrow as pa
import numpy as np
from ctransformers import AutoModelForCausalLM
import json

# Window of line indices within which to search for the line to replace.
MIN_NUMBER_LINES = 4
MAX_NUMBER_LINES = 21


def search_most_similar_line(text, searched_line):
    # Compare `searched_line` against every line inside the search window
    # and return the existing line with the smallest edit distance.
    lines = text.split("\n")
    values = []
    for line in lines[MIN_NUMBER_LINES:MAX_NUMBER_LINES]:
        values.append(pylcs.edit_distance(line, searched_line))
    output = lines[np.array(values).argmin() + MIN_NUMBER_LINES]
    return output
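
# Illustrative example (assumes a source file whose line 5 is "MAX_SPEED = 10"):
#   search_most_similar_line(source, "MAX_SPEED = 20")
# would return "MAX_SPEED = 10", the closest existing line by edit distance.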


def strip_indentation(code_block):
    # Use textwrap.dedent to strip common leading whitespace
    dedented_code = textwrap.dedent(code_block)
    return dedented_code
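
# For instance, strip_indentation("    x = 1") returns "x = 1";
# textwrap.dedent only removes whitespace common to every line.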


def replace_code_with_indentation(original_code, replacement_code):
    # Split the original code into lines
    lines = original_code.splitlines()
    if len(lines) != 0:
        # Preserve the indentation of the first line
        indentation = lines[0][: len(lines[0]) - len(lines[0].lstrip())]
        # Prepend the preserved indentation to the replacement code
        new_code_lines = indentation + replacement_code
    else:
        new_code_lines = replacement_code
    return new_code_lines
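
# Example: replace_code_with_indentation("        x = 1", "x = 2")
# returns "        x = 2", keeping the original eight-space indent.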


def replace_source_code(source_code, gen_replacement):
    # Find the original line the generated replacement most closely matches.
    initial = search_most_similar_line(source_code, gen_replacement)
    print("Initial source code: %s" % initial)
    # Strip the markdown code fence and newlines from the generated code.
    replacement = strip_indentation(
        gen_replacement.replace("```python\n", "")
        .replace("\n```", "")
        .replace("\n", "")
    )
    intermediate_result = replace_code_with_indentation(initial, replacement)
    print("Intermediate result: %s" % intermediate_result)
    end_result = source_code.replace(initial, intermediate_result)
    return end_result
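
# End to end (illustrative): given a model reply of
#   "```python\nMAX_SPEED = 20\n```"
# replace_source_code swaps the closest matching line of `source_code`
# for "MAX_SPEED = 20" while keeping its original indentation.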


def save_as(content, path):
    # Use at the end of replace_source_code, e.g. save_as(end_result, "file_path")
    with open(path, "w") as file:
        file.write(content)


class Operator:
    def __init__(self):
        # Load the quantized Mistral model (GGUF) through ctransformers,
        # offloading up to 50 layers to the GPU.
        self.llm = AutoModelForCausalLM.from_pretrained(
            "TheBloke/OpenHermes-2.5-Mistral-7B-GGUF",
            model_file="openhermes-2.5-mistral-7b.Q4_K_M.gguf",
            model_type="mistral",
            gpu_layers=50,
        )
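
    # Note: gpu_layers=50 assumes a CUDA-capable GPU; with ctransformers,
    # gpu_layers=0 falls back to CPU-only inference.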

    def on_event(
        self,
        dora_event,
        send_output,
    ) -> DoraStatus:
        if dora_event["type"] == "INPUT":
            input = dora_event["value"][0].as_py()
            # Code-rewriting branch, currently disabled.
            if False:
                with open(input["path"], "r", encoding="utf8") as f:
                    raw = f.read()
                prompt = f"{raw[:400]} \n\n {input['query']}. "
                output = self.ask_mistral(
                    "You're a Python code expert. Respond with only one line of code that modifies a constant variable. Keep the uppercase.",
                    prompt,
                )
                print("output: {}".format(output))
                source_code = replace_source_code(raw, output)
                send_output(
                    "output_file",
                    pa.array(
                        [
                            {
                                "raw": source_code,
                                "path": input["path"],
                                "response": output,
                                "prompt": prompt,
                            }
                        ]
                    ),
                    dora_event["metadata"],
                )
            else:
                print("input: ", input, flush=True)
                output = self.ask_mistral(
                    """You're a JSON expert. Format your response as JSON with a topic field and a data field.
The schema for each topic is:
- led: Int[3] (min: 0, max: 255)
- blaster: Int (min: 0, max: 128)
- control: Int[3] (min: -1, max: 1)
- rotation: Int[2] (min: -55, max: 55)
""",
                    input["query"],
                )
                print("output: {}".format(output), flush=True)
                try:
                    output = json.loads(output)
                    # If data is not a list, wrap it in one.
                    if not isinstance(output["data"], list):
                        output["data"] = [output["data"]]
                    if output["topic"] in ["led", "blaster", "control", "rotation"]:
                        print("output", output)
                        send_output(
                            output["topic"],
                            pa.array(output["data"]),
                            dora_event["metadata"],
                        )
                except (json.JSONDecodeError, KeyError, TypeError):
                    print("Could not parse json", flush=True)
        return DoraStatus.CONTINUE
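
    # A well-formed model reply for the JSON branch looks like (illustrative):
    #   {"topic": "control", "data": [1, 0, 0]}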
    def ask_mistral(self, system_message, prompt):
        # Build a ChatML-style prompt, as expected by OpenHermes 2.5.
        prompt_template = f"""<|im_start|>system
{system_message}<|im_end|>
<|im_start|>user
{prompt}<|im_end|>
<|im_start|>assistant
"""
        # Generate a completion for the prompt.
        outputs = self.llm(prompt_template)
        # Keep only the text before the first end-of-turn marker.
        return outputs.split("<|im_end|>")[0]
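
# Illustrative call (names from this file; the reply shown is hypothetical):
#   Operator().ask_mistral("You're a JSON expert...", "set the led to red")
#   -> '{"topic": "led", "data": [255, 0, 0]}'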


if __name__ == "__main__":
    op = Operator()
    # Path to the current file
    current_file_path = __file__
    # Directory of the current file
    current_directory = os.path.dirname(current_file_path)
    path = os.path.join(current_directory, "planning_op.py")
    with open(path, "r", encoding="utf8") as f:
        raw = f.read()
    op.on_event(
        {
            "type": "INPUT",
            "id": "tick",
            "value": pa.array(
                [
                    {
                        "raw": raw,
                        "path": path,
                        "query": "set the control to 1 0 0",
                    }
                ]
            ),
            "metadata": [],
        },
        print,
    )