iimran committed on
Commit 698b30e · verified · 1 Parent(s): 98ea36e

Update app.py

Files changed (1)
app.py +20 -3
app.py CHANGED
@@ -1,6 +1,7 @@
 import os
 import json
 import numpy as np
+import textwrap
 from tokenizers import Tokenizer
 import onnxruntime as ort
 from huggingface_hub import hf_hub_download
@@ -43,10 +44,26 @@ class ONNXInferencePipeline:
 
         code_str = os.getenv("banned")
         if not code_str:
-            raise Exception("Environment variable 'code' is not set. Please set it with your complete application code.")
+            raise Exception("Environment variable 'banned' is not set. Please set it with your banned keywords list.")
 
-        # Execute the code loaded from the environment variable
-        exec(code_str)
+        # Create a local namespace to execute the code
+        local_vars = {}
+
+        # Wrap the code in a function to allow return statements
+        wrapped_code = f"""
+def get_banned_keywords():
+{textwrap.indent(code_str, ' ')}
+"""
+
+        try:
+            # Execute the wrapped code
+            exec(wrapped_code, globals(), local_vars)
+            # Call the function to get the banned keywords
+            return local_vars['get_banned_keywords']()
+        except Exception as e:
+            print(f"Error loading banned keywords: {e}")
+            # Return a default empty list if there's an error
+            return []
 
     def contains_banned_keyword(self, text):
         """Check if the input text contains any banned keywords."""