from dotenv import load_dotenv
from openai import OpenAI
from pypdf import PdfReader
import gradio as gr
import json
import os
import datetime
from pathlib import Path
print("Current working directory:", os.getcwd())
def load_usage():
    # Return today's accumulated spend in USD; USAGE_FILE is defined in the config section below.
    if USAGE_FILE.exists():
        with open(USAGE_FILE, "r") as f:
            data = json.load(f)
        if data["date"] == datetime.date.today().isoformat():
            return data["total_cost"]
    # No file yet, or the stored record is from a previous day: start from zero.
    return 0.0

def save_usage(total_cost):
    # Persist today's date and the running cost so the daily limit survives restarts.
    with open(USAGE_FILE, "w") as f:
        json.dump({
            "date": datetime.date.today().isoformat(),
            "total_cost": total_cost
        }, f)

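# The usage file written above is a tiny JSON document, e.g. (illustrative values):
# {"date": "2024-06-01", "total_cost": 0.0123}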
load_dotenv(override=True)
client = OpenAI()
# client = OpenAI(
# api_key=os.getenv('DEEPSEEK_API_KEY'),
# base_url="https://api.deepseek.com/v1" # Example endpoint; replace with the actual DeepSeek endpoint
# )
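# Note: if you switch to the DeepSeek client above, the model name passed to
# chat.completions.create() in chat() below and the PRICES lookup key would
# presumably need to change to "deepseek-chat" as well.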
# === CONFIGURABLE BUDGET LIMIT (USD) ===
USAGE_LIMIT_PER_DAY = 0.1 # USD
USAGE_FILE = Path("daily_usage.json")
print(f"Expected usage file at: {USAGE_FILE.resolve()}")
total_cost = load_usage()
# Pricing reference (as of 2024-06): USD per token, i.e. the per-1K-token price divided by 1000. Adjust if needed.
PRICES = {
    "gpt-4o-mini": {"input": 0.00015 / 1000, "output": 0.0006 / 1000},
    "deepseek-chat": {"input": 0.00027 / 1000, "output": 0.0011 / 1000}
}
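# Rough worked example with these gpt-4o-mini rates: a hypothetical call using
# 1,000 input tokens and 500 output tokens costs about
# 1000 * 0.00000015 + 500 * 0.0000006 = $0.00045,
# so the $0.10 daily cap allows on the order of 200 such calls.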
# Extract the plain text from every page of the resume PDF.
reader = PdfReader("resume.pdf")
resume = ""
for page in reader.pages:
    text = page.extract_text()
    if text:
        resume += text
with open("summary.txt", "r", encoding="utf-8") as f:
    summary = f.read()
name = "Liang Zhao"
system_prompt = f"You are acting as {name}. You are answering questions on {name}'s website, \
particularly questions related to {name}'s career, background, skills and experience. \
Your responsibility is to represent {name} for interactions on the website as faithfully as possible. \
You are given a summary of {name}'s background and resume which you can use to answer questions. \
Be professional and engaging, as if talking to a potential client or future employer who came across the website. \
If you don't know the answer, say so."
system_prompt += f"\n\n## Summary:\n{summary}\n\n## Resume:\n{resume}\n\n"
system_prompt += f"With this context, please chat with the user, always staying in character as {name}."
def chat(message, history):
    global total_cost
    # Reload the persisted total so concurrent sessions see the latest spend.
    total_cost = load_usage()
    if total_cost >= USAGE_LIMIT_PER_DAY:
        return "Daily usage limit reached. Please try again tomorrow."
    messages = [{"role": "system", "content": system_prompt}] + history + [{"role": "user", "content": message}]
    response = client.chat.completions.create(model="gpt-4o-mini", messages=messages)
    # Estimate the cost of this call from the reported token usage and persist the new total.
    usage = response.usage
    input_tokens = usage.prompt_tokens
    output_tokens = usage.completion_tokens
    print(input_tokens, output_tokens)
    cost = (input_tokens * PRICES["gpt-4o-mini"]["input"] +
            output_tokens * PRICES["gpt-4o-mini"]["output"])
    total_cost += cost
    save_usage(total_cost)
    return response.choices[0].message.content

gr.ChatInterface(chat, type="messages").launch()
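# When self-hosting outside Hugging Face Spaces, you may want to bind explicitly,
# e.g. launch(server_name="0.0.0.0", server_port=7860); on Spaces the bare
# launch() above is typically enough, since the SDK supplies host and port.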