linjunpop committed
Commit 84250ad · 1 Parent(s): fca30cf

Upload summarine.livemd

Files changed (1):
  1. public-apps/summarine.livemd +70 -0
public-apps/summarine.livemd ADDED
@@ -0,0 +1,70 @@
<!-- livebook:{"app_settings":{"show_source":true,"slug":"Summarine"}} -->

# Summarine

```elixir
Mix.install(
  [
    {:kino_bumblebee, "~> 0.3.0"},
    {:exla, "~> 0.5.1"},
    {:req, "~> 0.3.11"}
  ],
  config: [nx: [default_backend: EXLA.Backend]]
)
```

## Intro

Audio to text, then summary: record or upload a clip, transcribe it with a Whisper model, and summarize the transcript with a locally served Ollama model.

## App

```elixir
{:ok, model_info} = Bumblebee.load_model({:hf, "openai/whisper-base"})
{:ok, featurizer} = Bumblebee.load_featurizer({:hf, "openai/whisper-base"})
{:ok, tokenizer} = Bumblebee.load_tokenizer({:hf, "openai/whisper-base"})
{:ok, generation_config} = Bumblebee.load_generation_config({:hf, "openai/whisper-base"})
generation_config = Bumblebee.configure(generation_config, max_new_tokens: 100)

serving =
  Bumblebee.Audio.speech_to_text(model_info, featurizer, tokenizer, generation_config,
    compile: [batch_size: 1],
    defn_options: [compiler: EXLA]
  )
```
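
As a quick check (not part of the original notebook), the serving can be exercised once before wiring up the UI, for example on one second of silence at the featurizer's sampling rate; the first run triggers the EXLA compilation and should return a `%{results: [%{text: _}]}` map (the text itself is meaningless for silence).

```elixir
# Hypothetical smoke test: one second of silence at the featurizer's sampling rate.
silence = Nx.broadcast(0.0, {featurizer.sampling_rate})
Nx.Serving.run(serving, silence)
```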

```elixir
audio_input = Kino.Input.audio("Audio", sampling_rate: featurizer.sampling_rate)
form = Kino.Control.form([audio: audio_input], submit: "Run")
frame = Kino.Frame.new()

Kino.listen(form, fn %{data: %{audio: audio}} ->
  if audio do
    Kino.Frame.render(frame, Kino.Text.new("Running..."))

    # Convert the raw PCM binary to a tensor and average the channels to mono,
    # which is the shape the Whisper serving expects.
    audio =
      audio.data
      |> Nx.from_binary(:f32)
      |> Nx.reshape({:auto, audio.num_channels})
      |> Nx.mean(axes: [1])

    %{results: [%{text: generated_text}]} = Nx.Serving.run(serving, audio)
    Kino.Frame.render(frame, Kino.Text.new(generated_text))
  end
end)

Kino.Layout.grid([form, frame], boxed: true, gap: 16)
```

```elixir
# Sample text standing in for a transcript, so the summarization cell below
# can be run on its own without recording audio first.
generated_text = "I'm blue and you are red, I'm very very blue."
```

```elixir
payload = %{
  model: "llama2-uncensored",
  prompt: "Please summarize the text: #{generated_text}"
}

Req.post!("http://localhost:11434/api/generate", json: payload).body
```
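
The request above assumes an Ollama server on its default port (11434) with the `llama2-uncensored` model pulled. Ollama streams its reply as newline-delimited JSON by default, so a variant like the sketch below (not in the original notebook) sets `stream: false` to get a single JSON object and reads the summary from its `"response"` field; the `summarize` helper name is only for illustration, and the same call could be made inside the `Kino.listen/2` callback to summarize the live transcript instead of the sample text.

```elixir
# Sketch, assuming a local Ollama server with the llama2-uncensored model available.
# With stream: false, Ollama returns a single JSON object, which Req decodes into
# a map; the generated summary is under the "response" key.
summarize = fn text ->
  payload = %{
    model: "llama2-uncensored",
    prompt: "Please summarize the text: #{text}",
    stream: false
  }

  Req.post!("http://localhost:11434/api/generate", json: payload).body["response"]
end

summarize.(generated_text)
```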