Infinitode Pty Ltd committed on
Commit 0cef7ae · verified · 1 Parent(s): 7dd2c0a

Upload 4 files

Files changed (4)
  1. .gitattributes +35 -35
  2. README.md +23 -13
  3. app.py +132 -0
  4. requirements.txt +4 -0
.gitattributes CHANGED
@@ -1,35 +1,35 @@
- *.7z filter=lfs diff=lfs merge=lfs -text
- *.arrow filter=lfs diff=lfs merge=lfs -text
- *.bin filter=lfs diff=lfs merge=lfs -text
- *.bz2 filter=lfs diff=lfs merge=lfs -text
- *.ckpt filter=lfs diff=lfs merge=lfs -text
- *.ftz filter=lfs diff=lfs merge=lfs -text
- *.gz filter=lfs diff=lfs merge=lfs -text
- *.h5 filter=lfs diff=lfs merge=lfs -text
- *.joblib filter=lfs diff=lfs merge=lfs -text
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
- *.model filter=lfs diff=lfs merge=lfs -text
- *.msgpack filter=lfs diff=lfs merge=lfs -text
- *.npy filter=lfs diff=lfs merge=lfs -text
- *.npz filter=lfs diff=lfs merge=lfs -text
- *.onnx filter=lfs diff=lfs merge=lfs -text
- *.ot filter=lfs diff=lfs merge=lfs -text
- *.parquet filter=lfs diff=lfs merge=lfs -text
- *.pb filter=lfs diff=lfs merge=lfs -text
- *.pickle filter=lfs diff=lfs merge=lfs -text
- *.pkl filter=lfs diff=lfs merge=lfs -text
- *.pt filter=lfs diff=lfs merge=lfs -text
- *.pth filter=lfs diff=lfs merge=lfs -text
- *.rar filter=lfs diff=lfs merge=lfs -text
- *.safetensors filter=lfs diff=lfs merge=lfs -text
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
- *.tar.* filter=lfs diff=lfs merge=lfs -text
- *.tar filter=lfs diff=lfs merge=lfs -text
- *.tflite filter=lfs diff=lfs merge=lfs -text
- *.tgz filter=lfs diff=lfs merge=lfs -text
- *.wasm filter=lfs diff=lfs merge=lfs -text
- *.xz filter=lfs diff=lfs merge=lfs -text
- *.zip filter=lfs diff=lfs merge=lfs -text
- *.zst filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,13 +1,23 @@
- ---
- title: Dungen
- emoji: 🚀
- colorFrom: indigo
- colorTo: indigo
- sdk: gradio
- sdk_version: 4.41.0
- app_file: app.py
- pinned: false
- license: mit
- ---
-
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ ---
+ title: Dungen
+ emoji: 🚀
+ colorFrom: indigo
+ colorTo: indigo
+ sdk: gradio
+ sdk_version: 4.41.0
+ app_file: app.py
+ pinned: false
+ license: mit
+ ---
+
+ # Dungen
+
+ Name generation models trained on game data. The two available models generate names based on the characters (NPCs), places, and items found in the official games:
+ - Terraria
+ - The Elder Scrolls V: Skyrim
+
+ ## Training your own models
+
+ You can train your own version of these models using the project code from our GitHub repository:
+
+ https://github.com/Infinitode/OPEN-ARC/tree/main/Project-5-TWNG
app.py ADDED
@@ -0,0 +1,132 @@
+ import gradio as gr
+ import sentencepiece as spm
+ import numpy as np
+ import tflite_runtime.interpreter as tflite
+
+ def custom_pad_sequences(sequences, maxlen, padding='pre', value=0):
+     """
+     Pads sequences to the same length.
+
+     :param sequences: List of lists, where each element is a sequence.
+     :param maxlen: Maximum length of all sequences.
+     :param padding: 'pre' or 'post', pad either before or after each sequence.
+     :param value: Float, padding value.
+     :return: Numpy array with dimensions (number_of_sequences, maxlen)
+     """
+     padded_sequences = np.full((len(sequences), maxlen), value)
+     for i, seq in enumerate(sequences):
+         if padding == 'pre':
+             if len(seq) <= maxlen:
+                 padded_sequences[i, -len(seq):] = seq
+             else:
+                 padded_sequences[i, :] = seq[-maxlen:]
+         elif padding == 'post':
+             if len(seq) <= maxlen:
+                 padded_sequences[i, :len(seq)] = seq
+             else:
+                 padded_sequences[i, :] = seq[:maxlen]
+     return padded_sequences
+
+ def generate_random_name(interpreter, vocab_size, sp, max_seq_len, max_length=10, temperature=0.5, seed_text=""):
+     # Get input and output tensor details from the TFLite interpreter
+     input_details = interpreter.get_input_details()
+     output_details = interpreter.get_output_details()
+
+     # Start from the seed text if given, otherwise from a random vocabulary token
+     if seed_text:
+         generated_name = seed_text
+     else:
+         random_index = np.random.randint(1, vocab_size)
+         random_token = sp.id_to_piece(random_index)
+         generated_name = random_token
+
+     decoded_name = generated_name  # fallback in case the loop does not run
+
+     for _ in range(max_length - 1):
+         token_list = sp.encode_as_ids(generated_name)
+         # Pad to the input length expected by the model
+         token_list = custom_pad_sequences([token_list], maxlen=max_seq_len, padding='pre')
+
+         # Convert token_list to FLOAT32 before setting the tensor
+         token_list = token_list.astype(np.float32)
+
+         # Set the input tensor
+         interpreter.set_tensor(input_details[0]['index'], token_list)
+
+         # Run inference
+         interpreter.invoke()
+
+         # Get the output tensor (next-token probability distribution)
+         predicted = interpreter.get_tensor(output_details[0]['index'])[0]
+
+         # Apply temperature scaling and renormalise the distribution
+         predicted = np.log(predicted + 1e-8) / temperature
+         predicted = np.exp(predicted) / np.sum(np.exp(predicted))
+
+         # Sample the next token from the distribution
+         next_index = int(np.random.choice(range(vocab_size), p=predicted))
+         next_token = sp.id_to_piece(next_index)
+
+         generated_name += next_token
+
+         # Decode the generated subword tokens into a string
+         decoded_name = sp.decode_pieces(generated_name.split())
+
+         # Stop if an empty token is predicted or the name is long enough (optional)
+         if next_token == '' or len(decoded_name) > max_length:
+             break
+
+     # Clean up SentencePiece markers and the end-of-sequence token
+     decoded_name = decoded_name.replace("▁", " ")
+     decoded_name = decoded_name.replace("</s>", "")
+     # Drop a trailing partial word, then capitalise the first letter
+     generated_name = decoded_name.rsplit(' ', 1)[0]
+     if not generated_name:
+         generated_name = decoded_name.strip()
+     if generated_name:
+         generated_name = generated_name[0].upper() + generated_name[1:]
+
+     # Split the name and drop a very short last part (likely a truncated token)
+     parts = generated_name.split()
+     if parts and len(parts[-1]) < 3:
+         generated_name = " ".join(parts[:-1])
+
+     return generated_name.strip()
+
+ def generateTerrariaNames(amount, max_length=30, temperature=0.5, seed_text=""):
+     sp = spm.SentencePieceProcessor()
+     sp.load("models/terraria_names.model")
+     amount = int(amount)
+
+     names = []
+
+     # Define necessary variables
+     vocab_size = sp.GetPieceSize()
+     max_seq_len = 12  # For Skyrim = 13, for Terraria = 12
+
+     # Load the TFLite model
+     interpreter = tflite.Interpreter(model_path="models/dungen_terraria_model.tflite")
+     interpreter.allocate_tensors()
+
+     # Generate the requested number of names
+     for _ in range(amount):
+         generated_name = generate_random_name(interpreter, vocab_size, sp, max_seq_len, seed_text=seed_text, max_length=max_length, temperature=temperature)
+         names.append(generated_name)
+     return names
+
+ def gradio_interface(amount, max_length, temperature, seed_text):
+     names = generateTerrariaNames(amount, max_length, temperature, seed_text)
+     # Join the generated names so they render cleanly in a single text output
+     return "\n".join(names)
+
+ iface = gr.Interface(
+     fn=gradio_interface,
+     inputs=[
+         gr.Number(value=1, minimum=1, maximum=25, label="Amount"),
+         gr.Slider(minimum=1, maximum=100, value=30, label="Max Length"),
+         gr.Slider(minimum=0.1, maximum=1.0, value=0.5, label="Temperature"),
+         gr.Textbox(value="", label="Seed Text (optional)")
+     ],
+     outputs="text",
+     title="Terraria Name Generator",
+     description="Generate random Terraria names using a TFLite model."
+ )
+
+ if __name__ == "__main__":
+     iface.launch()
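As a quick sanity check of the padding helper defined in app.py, the self-contained snippet below exercises `custom_pad_sequences` on made-up token lists; the example inputs are illustrative and not part of the commit.

```python
import numpy as np

# Standalone copy of the padding helper from app.py, exercised on toy inputs.
def custom_pad_sequences(sequences, maxlen, padding='pre', value=0):
    padded_sequences = np.full((len(sequences), maxlen), value)
    for i, seq in enumerate(sequences):
        if padding == 'pre':
            if len(seq) <= maxlen:
                padded_sequences[i, -len(seq):] = seq
            else:
                padded_sequences[i, :] = seq[-maxlen:]
        elif padding == 'post':
            if len(seq) <= maxlen:
                padded_sequences[i, :len(seq)] = seq
            else:
                padded_sequences[i, :] = seq[:maxlen]
    return padded_sequences

# 'pre' padding left-pads short sequences and keeps the most recent tokens
# of long ones, matching how the model input is built in generate_random_name.
print(custom_pad_sequences([[5, 6, 7]], maxlen=5, padding='pre'))
# [[0 0 5 6 7]]
print(custom_pad_sequences([[1, 2, 3, 4, 5, 6, 7]], maxlen=5, padding='pre'))
# [[3 4 5 6 7]]
```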
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ gradio
+ tflite_runtime
+ numpy
+ sentencepiece