"""Generate a text completion with the DeepSeek Coder 33B instruct model.

Loads the tokenizer and model at import time, exposes ``generate_text``
for producing completions, and runs a demo prompt when executed as a
script.  The original collapsed script referenced an undefined
``input_text`` variable and would crash with ``NameError``.
"""
import gradio as gr  # unused in this chunk; presumably for a UI elsewhere — kept
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "deepseek-ai/deepseek-coder-33b-instruct"

# NOTE: loading a 33B-parameter model here happens eagerly at import time
# and requires substantial memory — TODO confirm this is intended.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)


def generate_text(input_text: str, **overrides) -> str:
    """Return the model's completion for ``input_text``.

    Parameters
    ----------
    input_text : str
        The prompt to complete.
    **overrides
        Optional keyword arguments forwarded to ``model.generate`` to
        override the defaults below (e.g. ``max_length=200``).

    Returns
    -------
    str
        The decoded completion with special tokens stripped.
    """
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    kwargs = {
        "max_length": 500,
        "num_return_sequences": 1,
        # The original passed temperature/top_k WITHOUT do_sample=True;
        # under the default greedy decoding these are silently ignored
        # (transformers warns about exactly this). Enable sampling so the
        # parameters take effect as the author clearly intended.
        "do_sample": True,
        "temperature": 0.7,
        "top_k": 50,
    }
    kwargs.update(overrides)
    output_ids = model.generate(input_ids, **kwargs)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)


if __name__ == "__main__":
    # The original referenced an undefined `input_text`; supply a concrete
    # example prompt so the script actually runs.
    prompt = "Write a quick sort algorithm in Python."
    print(generate_text(prompt))