Update app.py
app.py
CHANGED
@@ -45,45 +45,46 @@ class AdvancedAIHumanizer:
 
         # AI-flagged formal terms with contextually appropriate replacements
         self.formal_replacements = {
-            r'\bdelve into\b': ["explore", "examine", "investigate", "analyze"],
-            r'\bembark on\b': ["begin", "start", "initiate", "commence"],
-            r'\ba testament to\b': ["evidence of", "proof of", "demonstrates", "shows"],
-            r'\blandscape of\b': ["context of", "environment of", "field of", "domain of"],
-            r'\bnavigating\b': ["managing", "addressing", "handling", "working through"],
-            r'\bmeticulous\b': ["careful", "thorough", "detailed", "precise"],
-            r'\bintricate\b': ["complex", "detailed", "sophisticated", "elaborate"],
-            r'\bmyriad\b': ["numerous", "many", "various", "multiple"],
-            r'\bplethora\b': ["abundance", "variety", "range", "collection"],
-            r'\bparadigm\b': ["model", "framework", "approach", "system"],
-            r'\bsynergy\b': ["collaboration", "cooperation", "coordination", "integration"],
-            r'\bleverage\b': ["utilize", "employ", "use", "apply"],
-            r'\bfacilitate\b': ["enable", "support", "assist", "help"],
-            r'\boptimize\b': ["improve", "enhance", "refine", "perfect"],
-            r'\bstreamline\b': ["simplify", "improve", "refine", "enhance"],
-            r'\brobust\b': ["strong", "reliable", "effective", "solid"],
-            r'\bseamless\b': ["smooth", "integrated", "unified", "continuous"],
-            r'\binnovative\b': ["creative", "original", "novel", "advanced"],
-            r'\bcutting-edge\b': ["advanced", "latest", "modern", "current"],
-            r'\bstate-of-the-art\b': ["advanced", "modern", "sophisticated", "current"]
+            r'\bdelve into\b': ["explore", "examine", "investigate", "analyze", "look into"],
+            r'\bembark on\b': ["begin", "start", "initiate", "commence", "launch"],
+            r'\ba testament to\b': ["evidence of", "proof of", "demonstrates", "shows", "indicates"],
+            r'\blandscape of\b': ["context of", "environment of", "field of", "domain of", "realm of"],
+            r'\bnavigating\b': ["managing", "addressing", "handling", "working through", "dealing with"],
+            r'\bmeticulous\b': ["careful", "thorough", "detailed", "precise", "systematic"],
+            r'\bintricate\b': ["complex", "detailed", "sophisticated", "elaborate", "nuanced"],
+            r'\bmyriad\b': ["numerous", "many", "various", "multiple", "countless"],
+            r'\bplethora\b': ["abundance", "variety", "range", "collection", "wealth"],
+            r'\bparadigm\b': ["model", "framework", "approach", "system", "method"],
+            r'\bsynergy\b': ["collaboration", "cooperation", "coordination", "integration", "teamwork"],
+            r'\bleverage\b': ["utilize", "employ", "use", "apply", "harness"],
+            r'\bfacilitate\b': ["enable", "support", "assist", "help", "promote"],
+            r'\boptimize\b': ["improve", "enhance", "refine", "perfect", "maximize"],
+            r'\bstreamline\b': ["simplify", "improve", "refine", "enhance", "optimize"],
+            r'\brobust\b': ["strong", "reliable", "effective", "solid", "durable"],
+            r'\bseamless\b': ["smooth", "integrated", "unified", "continuous", "fluid"],
+            r'\binnovative\b': ["creative", "original", "novel", "advanced", "groundbreaking"],
+            r'\bcutting-edge\b': ["advanced", "latest", "modern", "current", "state-of-the-art"],
+            r'\bstate-of-the-art\b': ["advanced", "modern", "sophisticated", "current", "latest"]
         }
 
         # Transition phrase variations
         self.transition_replacements = {
-            r'\bfurthermore\b': ["additionally", "moreover", "in addition", "also"],
-            r'\bmoreover\b': ["furthermore", "additionally", "also", "in addition"],
-            r'\bhowever\b': ["nevertheless", "yet", "still", "although"],
-            r'\bnevertheless\b': ["however", "yet", "still", "nonetheless"],
-            r'\btherefore\b': ["consequently", "thus", "as a result", "hence"],
-            r'\bconsequently\b': ["therefore", "thus", "as a result", "accordingly"],
-            r'\bin conclusion\b': ["finally", "ultimately", "in summary", "to summarize"],
-            r'\bto summarize\b': ["in conclusion", "finally", "in summary", "overall"],
-            r'\bin summary\b': ["to conclude", "overall", "finally", "in essence"]
+            r'\bfurthermore\b': ["additionally", "moreover", "in addition", "also", "besides"],
+            r'\bmoreover\b': ["furthermore", "additionally", "also", "in addition", "what's more"],
+            r'\bhowever\b': ["nevertheless", "yet", "still", "although", "but"],
+            r'\bnevertheless\b': ["however", "yet", "still", "nonetheless", "even so"],
+            r'\btherefore\b': ["consequently", "thus", "as a result", "hence", "so"],
+            r'\bconsequently\b': ["therefore", "thus", "as a result", "accordingly", "hence"],
+            r'\bin conclusion\b': ["finally", "ultimately", "in summary", "to summarize", "overall"],
+            r'\bto summarize\b': ["in conclusion", "finally", "in summary", "overall", "in essence"],
+            r'\bin summary\b': ["to conclude", "overall", "finally", "in essence", "ultimately"]
        }
 
         # Sentence structure patterns for variation
         self.sentence_starters = [
             "Additionally,", "Furthermore,", "In particular,", "Notably,",
-            "Importantly,", "Significantly,", "Moreover,", "Consequently,"
+            "Importantly,", "Significantly,", "Moreover,", "Consequently,",
+            "Interestingly,", "Specifically,", "Essentially,", "Primarily,"
         ]
 
         # Professional contractions (limited and contextual)
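The dictionaries above only gain a fifth synonym per entry, but it helps to see how they are consumed: in `preserve_meaning_replacement` (later in this diff), one synonym is drawn per pattern and substituted into every match. A minimal standalone sketch of that behavior, using a single entry:

```python
import random
import re

formal_replacements = {
    r'\bleverage\b': ["utilize", "employ", "use", "apply", "harness"],
}

text = "Teams can leverage these tools to Leverage new data."
for pattern, options in formal_replacements.items():
    if re.search(pattern, text, re.IGNORECASE):
        # One synonym is drawn per pattern, then applied to every occurrence
        text = re.sub(pattern, random.choice(options), text, flags=re.IGNORECASE)
print(text)  # e.g. "Teams can harness these tools to harness new data."
```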
@@ -95,7 +96,9 @@ class AdvancedAIHumanizer:
             r'\bdo not\b': "don't",
             r'\bdoes not\b': "doesn't",
             r'\bwill not\b': "won't",
-            r'\bwould not\b': "wouldn't"
+            r'\bwould not\b': "wouldn't",
+            r'\bshould not\b': "shouldn't",
+            r'\bcould not\b': "couldn't"
         }
 
     def load_synonym_database(self):
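A side effect this diff does not address: substituting a fixed lowercase contraction under `re.IGNORECASE` lowercases sentence-initial matches ("Do not" becomes "don't"). A case-preserving variant is possible with a replacement callback; this is a hypothetical refinement, not code from app.py:

```python
import re

def contract(text: str, pattern: str, contraction: str) -> str:
    def keep_case(match: re.Match) -> str:
        # Carry the original capitalization over to the contraction
        return contraction.capitalize() if match.group(0)[0].isupper() else contraction
    return re.sub(pattern, keep_case, text, flags=re.IGNORECASE)

print(contract("Do not panic. We do not know yet.", r'\bdo not\b', "don't"))
# -> Don't panic. We don't know yet.
```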
@@ -104,6 +107,7 @@ class AdvancedAIHumanizer:
             # Test WordNet availability
             wordnet.synsets('test')
             self.wordnet_available = True
+            print("WordNet loaded successfully")
         except:
             self.wordnet_available = False
             print("WordNet not available, using limited synonym replacement")
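The hunk only adds a success log. Two observations: the bare `except:` catches everything, where `except LookupError` is the narrower idiom for a missing NLTK corpus, and the probe could attempt a download before giving up. A self-contained sketch along those lines (the `nltk.download` fallback is an assumption, not necessarily what this Space does):

```python
import nltk
from nltk.corpus import wordnet

def wordnet_available() -> bool:
    try:
        wordnet.synsets('test')  # any successful lookup proves the corpus loads
        return True
    except LookupError:
        try:
            nltk.download('wordnet', quiet=True)
            wordnet.synsets('test')
            return True
        except Exception:
            return False
```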
@@ -140,41 +144,60 @@ class AdvancedAIHumanizer:
         except:
             return word
 
-    def preserve_meaning_replacement(self, text):
+    def preserve_meaning_replacement(self, text, intensity_level=1):
         """Replace AI-flagged terms while preserving exact meaning"""
         result = text
 
+        # Determine replacement probability based on intensity
+        replacement_probability = {
+            1: 0.3,  # Light
+            2: 0.5,  # Standard
+            3: 0.7   # Heavy
+        }
+
+        prob = replacement_probability.get(intensity_level, 0.5)
+
         # Apply formal term replacements
         for pattern, replacements in self.formal_replacements.items():
-            if re.search(pattern, result, re.IGNORECASE):
+            if re.search(pattern, result, re.IGNORECASE) and random.random() < prob:
                 replacement = random.choice(replacements)
                 result = re.sub(pattern, replacement, result, flags=re.IGNORECASE)
 
         # Apply transition phrase replacements
         for pattern, replacements in self.transition_replacements.items():
-            if re.search(pattern, result, re.IGNORECASE):
+            if re.search(pattern, result, re.IGNORECASE) and random.random() < prob:
                 replacement = random.choice(replacements)
                 result = re.sub(pattern, replacement, result, flags=re.IGNORECASE)
 
         return result
 
-    def vary_sentence_structure(self, text):
+    def vary_sentence_structure(self, text, intensity_level=1):
         """Vary sentence structures while maintaining meaning"""
         sentences = sent_tokenize(text)
         varied_sentences = []
 
+        # Determine variation probability based on intensity
+        variation_probability = {
+            1: 0.1,  # Light
+            2: 0.2,  # Standard
+            3: 0.3   # Heavy
+        }
+
+        prob = variation_probability.get(intensity_level, 0.2)
+
         for i, sentence in enumerate(sentences):
             # Occasionally add transitional phrases at the beginning
-            if i > 0 and len(sentence.split()) > 6 and random.random() <
+            if i > 0 and len(sentence.split()) > 6 and random.random() < prob:
                 starter = random.choice(self.sentence_starters)
                 sentence = sentence[0].lower() + sentence[1:]
                 sentence = f"{starter} {sentence}"
 
             # Convert some passive to active voice and vice versa
-
+            if random.random() < prob:
+                sentence = self.vary_voice(sentence)
 
             # Restructure complex sentences occasionally
-            if len(sentence.split()) > 15 and random.random() <
+            if len(sentence.split()) > 15 and random.random() < prob:
                 sentence = self.restructure_complex_sentence(sentence)
 
             varied_sentences.append(sentence)
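The probability tables pin down what the intensity levels mean: each flagged pattern found in the text is rewritten with probability 0.3/0.5/0.7, and each sentence is restructured with probability 0.1/0.2/0.3. Note that the new `self.vary_voice(...)` call references a method not defined anywhere in this diff, so it must already exist elsewhere in app.py. A quick sketch comparing the levels (instance construction assumed; the constructor is not shown in this diff):

```python
humanizer = AdvancedAIHumanizer()  # assumed constructor

sample = "We will delve into the landscape of robust, cutting-edge tooling."
for level, name in [(1, "light"), (2, "standard"), (3, "heavy")]:
    out = humanizer.preserve_meaning_replacement(sample, intensity_level=level)
    print(f"{name}: {out}")
```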
@@ -192,7 +215,7 @@ class AdvancedAIHumanizer:
         ]
 
         for pattern, replacement in passive_patterns:
-            if re.search(pattern, sentence) and random.random() < 0.
+            if re.search(pattern, sentence) and random.random() < 0.3:
                 sentence = re.sub(pattern, replacement, sentence)
                 break
 
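`passive_patterns` itself is defined above this hunk and never shown. For orientation only, a hypothetical entry compatible with this loop pairs a passive-voice regex with a group-reordering template; as the output shows, naive regex swaps can leave awkward word order, which may be why the loop fires at only 30% probability and stops after one substitution:

```python
import re

# Hypothetical (pattern, replacement) pair in the shape the loop expects
passive_patterns = [
    (r'\bwas (\w+ed) by (\w+)\b', r'\2 \1'),  # "was reviewed by Alice" -> "Alice reviewed"
]

sentence = "The draft was reviewed by Alice before release."
for pattern, replacement in passive_patterns:
    if re.search(pattern, sentence):
        sentence = re.sub(pattern, replacement, sentence)
        break
print(sentence)  # The draft Alice reviewed before release.
```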
@@ -214,21 +237,38 @@ class AdvancedAIHumanizer:
 
         return sentence
 
-    def apply_subtle_contractions(self, text):
+    def apply_subtle_contractions(self, text, intensity_level=1):
         """Apply professional contractions sparingly"""
+        # Determine contraction probability based on intensity
+        contraction_probability = {
+            1: 0.2,  # Light
+            2: 0.3,  # Standard
+            3: 0.4   # Heavy
+        }
+
+        prob = contraction_probability.get(intensity_level, 0.3)
+
         for pattern, contraction in self.professional_contractions.items():
-
-            if re.search(pattern, text, re.IGNORECASE) and random.random() < 0.3:
+            if re.search(pattern, text, re.IGNORECASE) and random.random() < prob:
                 text = re.sub(pattern, contraction, text, flags=re.IGNORECASE)
 
         return text
 
-    def enhance_vocabulary_diversity(self, text):
+    def enhance_vocabulary_diversity(self, text, intensity_level=1):
         """Enhance vocabulary diversity using contextual synonyms"""
         words = word_tokenize(text)
         enhanced_words = []
         word_frequency = defaultdict(int)
 
+        # Determine synonym probability based on intensity
+        synonym_probability = {
+            1: 0.1,  # Light
+            2: 0.2,  # Standard
+            3: 0.3   # Heavy
+        }
+
+        prob = synonym_probability.get(intensity_level, 0.2)
+
         # Track word frequency to identify repetitive words
         for word in words:
             if word.isalpha() and len(word) > 4:
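Note the granularity of the randomness here: the coin is flipped once per pattern per call, so a given contraction either applies to every occurrence in the text or to none. A standalone paraphrase of the hunk (dictionary trimmed to two entries) makes that visible:

```python
import random
import re

professional_contractions = {r'\bdo not\b': "don't", r'\bit is\b': "it's"}

def apply_subtle_contractions(text: str, prob: float = 0.3) -> str:
    for pattern, contraction in professional_contractions.items():
        # One coin flip per pattern: all matches contract together or not at all
        if re.search(pattern, text, re.IGNORECASE) and random.random() < prob:
            text = re.sub(pattern, contraction, text, flags=re.IGNORECASE)
    return text

random.seed(0)  # deterministic demo
print(apply_subtle_contractions("It is late, and we do not know why it is slow."))
```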
@@ -237,7 +277,7 @@ class AdvancedAIHumanizer:
         for word in words:
             if (word.isalpha() and len(word) > 4 and
                 word_frequency[word.lower()] > 1 and
-                random.random() <
+                random.random() < prob):
 
                 synonym = self.get_contextual_synonym(word)
                 enhanced_words.append(synonym)
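One caveat that predates this diff: the method rebuilds its output with `' '.join(enhanced_words)` (visible in the next hunk), which reinserts spaces before the punctuation tokens that `word_tokenize` split off ("word ." instead of "word."). A small post-join cleanup, not present in app.py, could repair that:

```python
import re

def detokenize(tokens: list[str]) -> str:
    text = ' '.join(tokens)
    # Drop the space left before punctuation: "improved ," -> "improved,"
    return re.sub(r"\s+([.,;:!?')\]])", r"\1", text)

print(detokenize(["Results", "improved", ",", "notably", "."]))  # Results improved, notably.
```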
@@ -246,14 +286,23 @@ class AdvancedAIHumanizer:
 
         return ' '.join(enhanced_words)
 
-    def add_natural_variation(self, text):
+    def add_natural_variation(self, text, intensity_level=1):
         """Add natural human-like variations"""
         sentences = sent_tokenize(text)
         varied_sentences = []
 
+        # Determine variation probability based on intensity
+        variation_probability = {
+            1: 0.05,  # Light
+            2: 0.15,  # Standard
+            3: 0.25   # Heavy
+        }
+
+        prob = variation_probability.get(intensity_level, 0.15)
+
         for sentence in sentences:
             # Occasionally vary sentence length and structure
-            if len(sentence.split()) > 20 and random.random() <
+            if len(sentence.split()) > 20 and random.random() < prob:
                 # Split very long sentences
                 mid_point = len(sentence.split()) // 2
                 words = sentence.split()
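The actual splitting logic continues past the end of this hunk (only `mid_point` and `words` are visible, and a `break` appears in the next hunk). Purely as an illustration of one way such a split can work, here is a sketch that looks for a conjunction near the midpoint; this is not the Space's real continuation:

```python
def split_long_sentence(sentence: str) -> str:
    words = sentence.split()
    mid_point = len(words) // 2
    for offset in range(len(words) // 4):  # scan outward from the middle
        for i in (mid_point - offset, mid_point + offset):
            if 0 < i < len(words) - 1 and words[i].lower() in ("and", "but", "because"):
                first = ' '.join(words[:i]).rstrip(',') + '.'
                second = ' '.join(words[i + 1:])
                return f"{first} {second[0].upper()}{second[1:]}"
    return sentence  # no safe split point found
```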
@@ -267,7 +316,7 @@ class AdvancedAIHumanizer:
                 break
 
             # Add subtle emphasis occasionally
-            if random.random() <
+            if random.random() < prob:
                 sentence = self.add_subtle_emphasis(sentence)
 
             varied_sentences.append(sentence)
@@ -280,7 +329,9 @@ class AdvancedAIHumanizer:
             (r'\bvery important\b', "crucial"),
             (r'\bvery significant\b', "highly significant"),
             (r'\bvery effective\b', "highly effective"),
-            (r'\bvery useful\b', "particularly useful")
+            (r'\bvery useful\b', "particularly useful"),
+            (r'\bvery good\b', "excellent"),
+            (r'\bvery bad\b', "poor")
         ]
 
         for pattern, replacement in emphasis_patterns:
@@ -314,20 +365,19 @@ class AdvancedAIHumanizer:
 
         return text.strip()
 
-    def advanced_humanize(self, text,
+    def advanced_humanize(self, text, intensity_level=1):
         """Apply sophisticated humanization that preserves meaning"""
         current_text = text
 
-
-
-
-
-
-
-
-
-
-
+        print(f"Processing with intensity level: {intensity_level}")
+
+        # Apply humanization techniques with intensity-based parameters
+        current_text = self.preserve_meaning_replacement(current_text, intensity_level)
+        current_text = self.vary_sentence_structure(current_text, intensity_level)
+        current_text = self.enhance_vocabulary_diversity(current_text, intensity_level)
+        current_text = self.apply_subtle_contractions(current_text, intensity_level)
+        current_text = self.add_natural_variation(current_text, intensity_level)
+
         # Final coherence and cleanup
         current_text = self.final_coherence_check(current_text)
 
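The rewrite turns `advanced_humanize` into a fixed five-stage pipeline, threading `intensity_level` through every stage before the final coherence pass. Since each stage both consumes and returns plain text, the chain is easy to trace stage by stage (reusing the assumed `humanizer` instance from above):

```python
stages = [
    "preserve_meaning_replacement",
    "vary_sentence_structure",
    "enhance_vocabulary_diversity",
    "apply_subtle_contractions",
    "add_natural_variation",
]

text = "Furthermore, we will leverage robust paradigms to optimize outcomes."
for name in stages:
    text = getattr(humanizer, name)(text, 2)  # 2 = standard intensity
    print(f"after {name}: {text}")
```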
@@ -346,7 +396,7 @@ class AdvancedAIHumanizer:
         except Exception as e:
             return f"Could not calculate readability: {str(e)}"
 
-    def humanize_text(self, text, intensity="
+    def humanize_text(self, text, intensity="standard"):
         """Main humanization method with meaning preservation"""
         if not text or not text.strip():
             return "Please provide text to humanize."
@@ -362,13 +412,18 @@ class AdvancedAIHumanizer:
         except Exception as nltk_error:
             return f"NLTK Error: {str(nltk_error)}. Please try again."
 
-        #
-
-
-
-
-
+        # Map intensity to numeric levels
+        intensity_mapping = {
+            "light": 1,
+            "standard": 2,
+            "heavy": 3
+        }
+
+        intensity_level = intensity_mapping.get(intensity, 2)
+        print(f"Using intensity: {intensity} (level {intensity_level})")
+
+        # Apply humanization
+        result = self.advanced_humanize(text, intensity_level)
 
         return result
 
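With the string-to-level mapping in place, the public entry point accepts "light", "standard", or "heavy", and anything unrecognized falls back to level 2 via `.get(intensity, 2)`. Typical calls, again assuming the `humanizer` instance:

```python
text = "Moreover, this state-of-the-art framework will facilitate seamless integration."
for intensity in ("light", "standard", "heavy"):
    print(intensity, "->", humanizer.humanize_text(text, intensity=intensity))
```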
@@ -450,13 +505,13 @@ def create_interface():
 
         intensity = gr.Radio(
             choices=[
-                ("Light Processing", "light"),
-                ("
-                ("
+                ("Light Processing (30% changes)", "light"),
+                ("Standard Processing (50% changes)", "standard"),
+                ("Heavy Processing (70% changes)", "heavy")
             ],
-            value="
-            label="🧠 Processing
-            info="
+            value="standard",
+            label="🧠 Processing Intensity",
+            info="Choose how extensively to humanize the content"
         )
 
         btn = gr.Button(
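Gradio's tuple-valued `choices` display the first element but hand the second ("light"/"standard"/"heavy") to the callback, which is exactly what `humanize_text` expects. A minimal wiring sketch with assumed component names; the Space's real interface is larger:

```python
import gradio as gr

humanizer = AdvancedAIHumanizer()  # assumed, as above

def run(text, intensity):
    return humanizer.humanize_text(text, intensity=intensity)

with gr.Blocks() as demo:
    inp = gr.Textbox(label="Input text", lines=8)
    intensity = gr.Radio(
        choices=[("Light Processing (30% changes)", "light"),
                 ("Standard Processing (50% changes)", "standard"),
                 ("Heavy Processing (70% changes)", "heavy")],
        value="standard",
        label="🧠 Processing Intensity",
    )
    out = gr.Textbox(label="Humanized text", lines=8)
    gr.Button("Humanize").click(run, inputs=[inp, intensity], outputs=out)

demo.launch()
```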
@@ -481,7 +536,36 @@ def create_interface():
 
         gr.HTML("""
         <div class="feature-box">
-            <h3>🎯
+            <h3>🎯 Processing Intensity Levels:</h3>
+            <div style="display: grid; grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); gap: 15px; margin: 15px 0;">
+                <div class="info-box">
+                    <strong>🟢 Light Processing (30%):</strong><br>
+                    • Minimal word replacements<br>
+                    • Basic sentence variation<br>
+                    • Subtle changes only<br>
+                    • Best for: Already human-like content
+                </div>
+                <div class="info-box">
+                    <strong>🟡 Standard Processing (50%):</strong><br>
+                    • Moderate humanization<br>
+                    • Balanced approach<br>
+                    • Professional tone maintained<br>
+                    • Best for: Most AI-generated content
+                </div>
+                <div class="info-box">
+                    <strong>🔴 Heavy Processing (70%):</strong><br>
+                    • Extensive modifications<br>
+                    • Maximum variation<br>
+                    • Strong AI detection bypass<br>
+                    • Best for: Highly detectable AI text
+                </div>
+            </div>
+        </div>
+        """)
+
+        gr.HTML("""
+        <div class="feature-box">
+            <h3>Advanced Humanization Features:</h3>
             <div style="display: grid; grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); gap: 15px; margin: 15px 0;">
                 <div class="info-box">
                     <strong>Meaning Preservation:</strong><br>