"""Gradio app that asks a Llama instruct model to rate password strength."""

import gradio as gr
from transformers import AutoTokenizer, LlamaForCausalLM

# BUG FIX: "LlamaForConditionalGeneration" does not exist in transformers —
# Llama is a decoder-only model exposed as LlamaForCausalLM.
# BUG FIX: the Hugging Face Hub org/id is "meta-llama/Llama-3.2-3B-Instruct",
# not "meta-ai/llama-3.2-3b-instruct".
MODEL_NAME = "meta-llama/Llama-3.2-3B-Instruct"

model = LlamaForCausalLM.from_pretrained(MODEL_NAME)
# AutoTokenizer resolves the correct fast tokenizer; the sentencepiece-based
# LlamaTokenizer cannot load Llama-3 checkpoints (tiktoken-style vocabulary).
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)


def evaluate_password_strength(password: str) -> str:
    """Return the model's free-text rating of *password*'s strength.

    NOTE(review): the raw password is embedded in the prompt and sent to the
    model — never log or persist it, and avoid remote inference endpoints.
    """
    # Robustness: avoid prompting the model with an empty string.
    if not password:
        return "Please enter a password."
    input_text = f"Rate the strength of the password: {password}"
    inputs = tokenizer(input_text, return_tensors="pt")
    # Bound generation length; without max_new_tokens, generate() runs until
    # EOS or the model's (very large) default maximum.
    output = model.generate(**inputs, max_new_tokens=128)
    # Decode only the newly generated tokens — output[0] includes the echoed
    # prompt, which the original code returned back to the user.
    new_tokens = output[0][inputs["input_ids"].shape[1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)


demo = gr.Interface(
    fn=evaluate_password_strength,
    # type="password" masks the input so it is not displayed on screen.
    inputs=gr.Textbox(label="Enter your password", type="password"),
    outputs=gr.Textbox(label="Password Strength Evaluation"),
    title="Password Strength Evaluator",
    description="Get the AI's evaluation of your password strength.",
)

if __name__ == "__main__":
    demo.launch()