playmak3r committed on
Commit
eda9118
·
1 Parent(s): a296ab3

Add a temperature slider control in the advanced options and adjust the default temperature value for the translation function

Browse files
Files changed (2) hide show
  1. app.py +4 -1
  2. model.py +8 -1
app.py CHANGED
@@ -65,11 +65,14 @@ with gr.Blocks(
65
  interactive=False
66
  )
67
 
 
 
 
68
  translate_btn = gr.Button("Translate ✨")
69
 
70
  translate_btn.click(
71
  run,
72
- inputs=[input_text, target_language],
73
  outputs=output_text
74
  )
75
 
 
65
  interactive=False
66
  )
67
 
68
+ with gr.Accordion("Advanced Options", open=False):
69
+ temp = gr.Slider(0.00, 1.0, step=.05, label="Temperature", value=0.3)
70
+
71
  translate_btn = gr.Button("Translate ✨")
72
 
73
  translate_btn.click(
74
  run,
75
+ inputs=[input_text, target_language, temp],
76
  outputs=output_text
77
  )
78
 
model.py CHANGED
@@ -11,6 +11,7 @@ model = AutoModelForCausalLM.from_pretrained(model_path)
11
  def run(
12
  text: str = "It’s on the house.",
13
  target_language: str = "Portuguese",
 
14
  ):
15
  messages = [
16
  {
@@ -27,7 +28,13 @@ def run(
27
  input_ids = tokenized_chat.to(model.device)
28
  input_length = input_ids.shape[1]
29
 
30
- outputs = model.generate(input_ids, max_new_tokens=2048)
 
 
 
 
 
 
31
  # 2. Fatiamos o tensor: pegamos do [input_length:] até o fim
32
  # Isso isola apenas os tokens novos gerados
33
  generated_tokens = outputs[0][input_length:]
 
11
  def run(
12
  text: str = "It’s on the house.",
13
  target_language: str = "Portuguese",
14
+ temperature: float = 0.3,
15
  ):
16
  messages = [
17
  {
 
28
  input_ids = tokenized_chat.to(model.device)
29
  input_length = input_ids.shape[1]
30
 
31
+ outputs = model.generate(
32
+ input_ids,
33
+ max_new_tokens=2048,
34
+ do_sample=True,
35
+ temperature=temperature,
36
+ top_p=0.9
37
+ )
38
  # 2. Fatiamos o tensor: pegamos do [input_length:] até o fim
39
  # Isso isola apenas os tokens novos gerados
40
  generated_tokens = outputs[0][input_length:]