abdeljalilELmajjodi committed
Commit 4cff703 · verified · Parent: fa004d4

Update app.py

Files changed (1):
  1. app.py (+7, -14)
app.py CHANGED
@@ -38,9 +38,14 @@ examples = [
 ]
 
 #inf_dataset=load_dataset("atlasia/atlaset_inference_ds",token=token,split="test",name="llm")
-detected_commit=False
 submit_file = Path("user_submit/") / f"data_{uuid.uuid4()}.json"
-
+scheduler = CommitScheduler(
+    repo_id="atlasia/atlaset_inference_ds",
+    repo_type="dataset",
+    folder_path=submit_file,
+    every=5,
+    token=token
+)
 @spaces.GPU
 def generate_text(prompt, max_length=256, temperature=0.7, top_p=0.9, top_k=150, num_beams=8, repetition_penalty=1.5):
     inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
@@ -60,7 +65,6 @@ def generate_text(prompt, max_length=256, temperature=0.7, top_p=0.9, top_k=150,
     result=tokenizer.decode(output[0], skip_special_tokens=True)
     #inf_dataset.add_item({"inputs":prompt,"outputs":result,"params":f"{max_length},{temperature},{top_p},{top_k},{num_beams},{repetition_penalty}"})
     save_feedback(prompt,result,f"{max_length},{temperature},{top_p},{top_k},{num_beams},{repetition_penalty}")
-    detected_commit=True
     return result
 
 def save_feedback(input,output,params) -> None:
@@ -68,7 +72,6 @@ def save_feedback(input,output,params) -> None:
     with feedback_file.open("a") as f:
         f.write(json.dumps({"input": input, "output": output, "params": params}))
         f.write("\n")
-    detected_commit=True
 
 if __name__ == "__main__":
     # Create the Gradio interface
@@ -89,14 +92,4 @@ if __name__ == "__main__":
         description="Enter a prompt and get AI-generated text using our pretrained LLM on Moroccan Darija.",
         examples=examples,
     )
-    if detected_commit:
-        print("[INFO] CommitScheduler...")
-        scheduler = CommitScheduler(
-            repo_id="atlasia/atlaset_inference_ds",
-            repo_type="dataset",
-            folder_path=submit_file,
-            every=5,
-            token=token
-        )
-        detected_commit=False
     app.launch()
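
For context on what the new module-level scheduler does: huggingface_hub's CommitScheduler pushes the contents of a local folder to a Hub repo in the background on a fixed interval (the every argument, in minutes). The pattern documented in the huggingface_hub scheduled-uploads guide points folder_path at a directory rather than a single file, and wraps file writes in the scheduler's lock so a write never races a background commit. Below is a minimal sketch of that documented pattern, assuming authentication comes from the environment; submit_folder and this save_feedback signature are illustrative, not the Space's exact code.

import json
import uuid
from pathlib import Path

from huggingface_hub import CommitScheduler

submit_folder = Path("user_submit")                  # directory that gets synced to the Hub
submit_folder.mkdir(parents=True, exist_ok=True)
submit_file = submit_folder / f"data_{uuid.uuid4()}.json"

scheduler = CommitScheduler(
    repo_id="atlasia/atlaset_inference_ds",          # dataset repo receiving the uploads
    repo_type="dataset",
    folder_path=submit_folder,                       # a folder, not an individual file
    every=5,                                         # commit-and-push every 5 minutes
)

def save_feedback(prompt: str, result: str, params: str) -> None:
    # The lock prevents appending to the file while a scheduled commit is in flight.
    with scheduler.lock:
        with submit_file.open("a") as f:
            f.write(json.dumps({"input": prompt, "output": result, "params": params}))
            f.write("\n")

Because this commit creates the scheduler at import time instead of inside the removed "if detected_commit:" block, background commits start as soon as the app module loads rather than being gated on a flag set during generation.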