ariG23498 (HF Staff) committed
Commit 68cd322 · verified · 1 Parent(s): 80d0f16

Upload AvitoTech_avision_1.py with huggingface_hub

Files changed (1)
  1. AvitoTech_avision_1.py +91 -0
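
The commit message notes that the file was uploaded with huggingface_hub. As a rough sketch (not the exact call used for this commit), such an upload can be done with upload_file; the repo id, repo type, and token handling below are placeholders, not values taken from this commit:

import os
from huggingface_hub import upload_file

upload_file(
    path_or_fileobj="AvitoTech_avision_1.py",   # local script to push
    path_in_repo="AvitoTech_avision_1.py",      # destination path inside the repo
    repo_id="model-metadata/example-repo",      # placeholder, not the actual target repo
    repo_type="dataset",                        # assumption about the repo type
    commit_message="Upload AvitoTech_avision_1.py with huggingface_hub",
    token=os.environ.get("HF_TOKEN"),           # assumption: token read from the environment
)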
AvitoTech_avision_1.py ADDED
@@ -0,0 +1,91 @@
+ # /// script
+ # requires-python = ">=3.12"
+ # dependencies = [
+ #     "torch",
+ #     "torchvision",
+ #     "transformers",
+ #     "diffusers",
+ #     "sentence-transformers",
+ #     "accelerate",
+ #     "peft",
+ #     "slack-sdk",
+ # ]
+ # ///
+
+ try:
+     # Load model directly
+     from transformers import AutoProcessor, AutoModelForVision2Seq
+
+     processor = AutoProcessor.from_pretrained("AvitoTech/avision")
+     model = AutoModelForVision2Seq.from_pretrained("AvitoTech/avision")
+     messages = [
+         {
+             "role": "user",
+             "content": [
+                 {"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},
+                 {"type": "text", "text": "What animal is on the candy?"}
+             ]
+         },
+     ]
+     inputs = processor.apply_chat_template(
+         messages,
+         add_generation_prompt=True,
+         tokenize=True,
+         return_dict=True,
+         return_tensors="pt",
+     ).to(model.device)
+
+     outputs = model.generate(**inputs, max_new_tokens=40)
+     print(processor.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+     with open('AvitoTech_avision_1.txt', 'w', encoding='utf-8') as f:
+         f.write('Everything was good in AvitoTech_avision_1.txt')
+ except Exception as e:
+     import os
+     from slack_sdk import WebClient
+     client = WebClient(token=os.environ['SLACK_TOKEN'])
+     client.chat_postMessage(
+         channel='#hub-model-metadata-snippets-sprint',
+         text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/AvitoTech_avision_1.txt|AvitoTech_avision_1.txt>',
+     )
+
+     with open('AvitoTech_avision_1.txt', 'a', encoding='utf-8') as f:
+         import traceback
+         f.write('''```CODE:
+ # Load model directly
+ from transformers import AutoProcessor, AutoModelForVision2Seq
+
+ processor = AutoProcessor.from_pretrained("AvitoTech/avision")
+ model = AutoModelForVision2Seq.from_pretrained("AvitoTech/avision")
+ messages = [
+     {
+         "role": "user",
+         "content": [
+             {"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},
+             {"type": "text", "text": "What animal is on the candy?"}
+         ]
+     },
+ ]
+ inputs = processor.apply_chat_template(
+     messages,
+     add_generation_prompt=True,
+     tokenize=True,
+     return_dict=True,
+     return_tensors="pt",
+ ).to(model.device)
+
+ outputs = model.generate(**inputs, max_new_tokens=40)
+ print(processor.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+ ```
+
+ ERROR:
+ ''')
+         traceback.print_exc(file=f)
+
+ finally:
+     from huggingface_hub import upload_file
+     upload_file(
+         path_or_fileobj='AvitoTech_avision_1.txt',
+         repo_id='model-metadata/code_execution_files',
+         path_in_repo='AvitoTech_avision_1.txt',
+         repo_type='dataset',
+     )
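
Usage note: the script opens with a PEP 723 inline metadata block (# /// script … # ///), so a runner that understands that format, for example uv run AvitoTech_avision_1.py, can resolve the listed dependencies before executing it. The SLACK_TOKEN environment variable is only read in the except branch, and the finally block always pushes AvitoTech_avision_1.txt to the model-metadata/code_execution_files dataset, whether the snippet succeeded or failed.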