import gradio as gr
from llama_cpp import Llama
from huggingface_hub import hf_hub_download
# Download your GGUF model from HF Hub
model_path = hf_hub_download(
    repo_id="astegaras/lora_python_converter",
    filename="llama-3.2-3b-instruct.Q2_K.gguf"
)
# Load the GGUF model with conservative settings for a CPU-only HF Space
llm = Llama(
    model_path=model_path,
    n_ctx=4096,        # context window size in tokens
    n_threads=4,       # CPU threads used for inference
    n_batch=64,        # prompt-processing batch size
    n_gpu_layers=0,    # keep every layer on the CPU (no GPU on the free tier)
    use_mmap=False,    # load the model into RAM instead of memory-mapping the file
    use_mlock=False,   # do not pin model memory into RAM
    low_vram=True,     # legacy llama.cpp option; newer llama-cpp-python builds may ignore it
    verbose=False
)
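# Note (assumption, version-dependent): recent llama-cpp-python releases also expose
# Llama.from_pretrained(repo_id=..., filename=...), which can replace the separate
# hf_hub_download() + Llama() steps above; the explicit two-step form is kept here.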
def generate_code(instruction):
    messages = [
        {"role": "system", "content": "You are a Python code generator. Return only code."},
        {"role": "user", "content": instruction},
    ]
    out = llm.create_chat_completion(
        messages=messages,
        max_tokens=512,
        temperature=0.2,
        top_p=0.5
    )
    return out["choices"][0]["message"]["content"]
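# Quick local smoke test (hypothetical prompt; uncomment to try before launching the UI):
# print(generate_code("Write a function that reverses a string"))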
# ---- GRADIO UI ----
with gr.Blocks(theme="gradio/soft") as demo:
    gr.Markdown(
        """
        # Python Code Generator
        Enter a task in plain English and receive executable Python code.
        Example:
        *"Help me set up my to-do list"*
        """
    )
    with gr.Row():
        with gr.Column(scale=1):
            instruction = gr.Textbox(
                label="Describe what you want to build",
                placeholder="Example: Help me set up my to-do list",
                lines=3,
            )
            submit = gr.Button("Generate Python Code", variant="primary")
        with gr.Column(scale=1):
            code_output = gr.Code(
                label="Generated Python Code",
                language="python"
            )
    submit.click(fn=generate_code, inputs=instruction, outputs=code_output)
demo.launch(share=True)
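# Hedged sketch (endpoint name assumed, not confirmed by this file): once the app is running,
# it can also be called programmatically with gradio_client; the API name usually defaults to
# the wired function, but check the app's "Use via API" panel for your Gradio version.
#   from gradio_client import Client
#   client = Client("http://127.0.0.1:7860")
#   print(client.predict("Parse a CSV file and print its headers", api_name="/generate_code"))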