rahul7star committed (verified)
Commit: cfbfbbe
Parent(s): ae9ce4a

Update app.py

Files changed (1):
  1. app.py +38 -24
app.py CHANGED
@@ -18,6 +18,29 @@ from peft import LoraConfig, get_peft_model, TaskType
 from huggingface_hub import HfApi, HfFolder, Repository
 import os, tempfile, shutil
 
+import asyncio
+import tempfile
+import shutil
+from huggingface_hub import HfApi, HfFolder, Repository
+
+async def async_upload_model(local_dir, hf_repo, output_log):
+    try:
+        token = HfFolder.get_token()
+        api = HfApi()
+        api.create_repo(repo_id=hf_repo, exist_ok=True)
+
+        output_log.append(f"\n☁️ Starting async upload to: {hf_repo}")
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            repo = Repository(local_dir=tmpdir, clone_from=hf_repo, use_auth_token=token)
+            # Copy model files
+            shutil.copytree(local_dir, tmpdir, dirs_exist_ok=True)
+            repo.push_to_hub(commit_message="Upload fine-tuned model")
+
+        output_log.append("\n✅ Async upload complete!")
+    except Exception as e:
+        output_log.append(f"\n❌ Async upload error: {e}")
+
 
 # === GPU check (Zero GPU compatible) ===
 def check_gpu_status():
@@ -139,34 +162,25 @@ You are a wise teacher interpreting Bhagavad Gita with deep insights.
             tokenizer=tokenizer,
         )
 
-        # ==== Train ====
-        progress(0.7, desc="Training...")
-        output_log.append("\n🚀 Starting training...\n" + "=" * 50)
-        train_result = trainer.train()
-
-        # ==== Save model ====
-        progress(0.85, desc="Saving model...")
-        output_log.append("\n💾 Saving model locally...")
-        trainer.save_model(output_dir)
-        tokenizer.save_pretrained(output_dir)
-
-        # ==== Upload to HF Hub ====
-        progress(0.9, desc="Uploading to Hugging Face Hub...")
-        hf_repo = "rahul7star/Qwen0.5-3B-Gita"
-        output_log.append(f"\n☁️ Uploading fine-tuned model to: {hf_repo}")
+        # ==== Train ====
+        progress(0.7, desc="Training...")
+        output_log.append("\n🚀 Starting training...\n" + "=" * 50)
+        train_result = trainer.train()
 
-        api = HfApi()
-        token = HfFolder.get_token()
+        # ==== Save model locally ====
+        progress(0.85, desc="Saving model...")
+        output_log.append("\n💾 Saving model locally...")
+        trainer.save_model(output_dir)
+        tokenizer.save_pretrained(output_dir)
 
-        api.create_repo(repo_id=hf_repo, exist_ok=True)
+        # ==== Async upload ====
+        hf_repo = "rahul7star/Qwen0.5-3B-Gita"
+        asyncio.create_task(async_upload_model(output_dir, hf_repo, output_log))
 
-        with tempfile.TemporaryDirectory() as tmpdir:
-            repo = Repository(local_dir=tmpdir, clone_from=hf_repo, use_auth_token=token)
-            shutil.copytree(output_dir, tmpdir, dirs_exist_ok=True)
-            repo.push_to_hub(commit_message="Upload fine-tuned Qwen-Gita LoRA model")
+        progress(1.0, desc="Complete!")
+
 
-        progress(1.0, desc="Complete!")
-        output_log.append("\n✅ Training complete & model uploaded successfully!")
+        output_log.append("\n✅ Training complete & model uploaded successfully!")
 
     except Exception as e:
         output_log.append(f"\n❌ Error: {e}")
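
Note on the upload path: async_upload_model uses the git-based Repository class, which recent huggingface_hub releases deprecate in favour of the HTTP endpoints. A minimal sketch of the same push done with HfApi.upload_folder, assuming authentication comes from the cached login or the HF_TOKEN environment variable (the helper name upload_model_http is illustrative and not part of this commit):

from huggingface_hub import HfApi

def upload_model_http(local_dir: str, hf_repo: str, output_log: list) -> None:
    # Illustrative alternative to the Repository flow above: push the saved
    # model over the Hub's HTTP API instead of cloning into a temp directory.
    api = HfApi()
    api.create_repo(repo_id=hf_repo, exist_ok=True)
    api.upload_folder(
        folder_path=local_dir,
        repo_id=hf_repo,
        commit_message="Upload fine-tuned model",
    )
    output_log.append(f"\n✅ Uploaded {local_dir} to {hf_repo}")

Because nothing is cloned locally, there is no temporary directory to populate or clean up.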
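
Note on scheduling: asyncio.create_task only succeeds when an event loop is already running, so the new upload call works if the surrounding training handler is an async def; the coroutine's git and file operations are blocking, so they will also stall that loop while they run. If the handler is a plain synchronous function, a daemon thread gives the same fire-and-forget behaviour. A sketch under that assumption (start_background_upload is hypothetical and not part of this commit):

import asyncio
import threading

def start_background_upload(local_dir: str, hf_repo: str, output_log: list) -> None:
    # Hypothetical helper: run the upload coroutine added in this commit
    # (async_upload_model) on its own event loop inside a daemon thread, so a
    # synchronous handler can return without waiting for the push to finish.
    worker = threading.Thread(
        target=lambda: asyncio.run(async_upload_model(local_dir, hf_repo, output_log)),
        daemon=True,
    )
    worker.start()

In either case the push may still be in flight when the handler logs "✅ Training complete & model uploaded successfully!", so the "✅ Async upload complete!" message emitted inside async_upload_model is the more reliable completion signal.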