|
streamlit==1.31.0

# Build interactive web apps: st.title("Hello"), st.button("Click"), st.image("pic.png")

# Display data: st.dataframe(df), st.plotly_chart(fig), st.write("Text")

# Create UI: st.slider("Range", 0, 100), st.selectbox("Choose", options), st.file_uploader("Upload")
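
# A minimal sketch tying these calls together (hypothetical app.py, run with `streamlit run app.py`;
# pandas is available as a Streamlit dependency):
#
#   import pandas as pd
#   import streamlit as st
#
#   st.title("Demo")                                 # page title
#   n = st.slider("Rows", 1, 100, 10)                # how many rows to display
#   st.dataframe(pd.DataFrame({"x": range(n)}))      # interactive table of toy data
#   if st.button("Say hello"):
#       st.write("Hello!")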
|
|
|
# asyncio (standard library - do not pin; the PyPI "asyncio==3.4.3" package is an obsolete Python 3.3 backport)

# Run async tasks: await asyncio.sleep(1), asyncio.run(main()), asyncio.gather(*tasks)

# Handle coroutines: async def fetch(), await coro(), asyncio.create_task(func())

# Manage event loops: loop = asyncio.get_event_loop(), loop.run_until_complete(coro()), asyncio.ensure_future(coro())
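
# A minimal sketch of the task/gather pattern above (standard library only):
#
#   import asyncio
#
#   async def fetch(i: int) -> int:
#       await asyncio.sleep(0.1)                     # stand-in for real I/O
#       return i * 2
#
#   async def main():
#       tasks = [asyncio.create_task(fetch(i)) for i in range(3)]
#       print(await asyncio.gather(*tasks))          # [0, 2, 4]
#
#   asyncio.run(main())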
|
|
|
websockets==12.0

# WebSocket server: async with websockets.serve(handler, "localhost", 8765): await asyncio.Future()

# Client connect: async with websockets.connect("ws://localhost:8765") as ws: await ws.send("Hi")

# Real-time comms: async for msg in ws: print(msg), msg = await ws.recv(), await ws.send("Response")
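
# A minimal sketch of an echo server (assumes port 8765 is free; pair it with the client call above):
#
#   import asyncio
#   import websockets
#
#   async def handler(ws):
#       async for msg in ws:                         # iterate until the client disconnects
#           await ws.send(f"echo: {msg}")
#
#   async def main():
#       async with websockets.serve(handler, "localhost", 8765):
#           await asyncio.Future()                   # run forever
#
#   asyncio.run(main())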
|
|
|
pillow==10.2.0

# Load images: img = Image.open("file.png"), img.resize((100, 100)), img.save("out.png")

# Process images: img.convert("RGB"), ImageDraw.Draw(img).text((10, 10), "Hello"), img.rotate(90)

# Manipulate: img.crop((0, 0, 50, 50)), img.filter(ImageFilter.BLUR), img.thumbnail((50, 50))
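
# A minimal sketch, assuming a local "file.png" exists (imports come from the PIL namespace):
#
#   from PIL import Image, ImageDraw, ImageFilter
#
#   img = Image.open("file.png").convert("RGB")      # load and normalise the colour mode
#   img = img.resize((200, 200))                     # resize() returns a new image
#   ImageDraw.Draw(img).text((10, 10), "Hello")      # draw text onto the image in place
#   img.filter(ImageFilter.BLUR).save("out.png")     # save a blurred copy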
|
|
|
edge_tts==6.1.12

# Text-to-speech: comm = edge_tts.Communicate("Hello", "en-US-AriaNeural"), await comm.save("out.mp3")

# Customize voice: comm = edge_tts.Communicate(text, voice="en-GB-SoniaNeural", rate="+10%"), await comm.save("out.mp3")

# Generate audio: async def speak(text): await edge_tts.Communicate(text, "en-US-GuyNeural").save("file.mp3")
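
# A minimal sketch (needs network access to the Edge TTS service; voice name and rate are examples):
#
#   import asyncio
#   import edge_tts
#
#   async def speak(text: str, path: str = "speech.mp3"):
#       comm = edge_tts.Communicate(text, voice="en-US-AriaNeural", rate="+10%")
#       await comm.save(path)                        # write MP3 audio to disk
#
#   asyncio.run(speak("Hello from edge-tts"))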
|
|
|
audio-recorder-streamlit==0.0.8

# Record audio: audio_bytes = audio_recorder(), st.audio(audio_bytes, format="audio/wav")

# Capture voice: if audio_bytes: process_audio(audio_bytes), st.write("Recording captured")

# Save recording: if audio_bytes: open("recording.wav", "wb").write(audio_bytes)
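
# A minimal sketch inside a Streamlit app (audio_recorder returns None until something is recorded):
#
#   import streamlit as st
#   from audio_recorder_streamlit import audio_recorder
#
#   audio_bytes = audio_recorder()
#   if audio_bytes:
#       st.audio(audio_bytes, format="audio/wav")    # play back in the browser
#       with open("recording.wav", "wb") as f:       # persist the raw WAV bytes
#           f.write(audio_bytes)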
|
|
|
nest_asyncio==1.6.0

# Fix nested loops: nest_asyncio.apply(), asyncio.run(main()), loop.run_until_complete(coro())

# Enable async in sync: call nest_asyncio.apply() before asyncio.run() in Jupyter/Streamlit

# Patch event loop: import nest_asyncio; nest_asyncio.apply() lets an already-running loop be re-entered
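
# A minimal sketch of the Jupyter/Streamlit pattern, where an event loop may already be running:
#
#   import asyncio
#   import nest_asyncio
#
#   nest_asyncio.apply()                             # patch asyncio so nested run() calls work
#
#   async def work():
#       await asyncio.sleep(0.1)
#       return "done"
#
#   print(asyncio.run(work()))                       # safe even inside an already-running loop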
|
|
|
streamlit-paste-button==0.1.1

# Paste images: result = paste_image_button("Paste"), if result.image_data: st.image(result.image_data)

# Capture clipboard: result = paste_image_button("Click"), result.image_data is a PIL image (None until pasted)

# Process paste: result = paste_image_button("Paste"), if result.image_data: save_image(result.image_data)
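
# A minimal sketch inside a Streamlit app (image_data is assumed to be a PIL image, or None until the user pastes):
#
#   import streamlit as st
#   from streamlit_paste_button import paste_image_button
#
#   result = paste_image_button("Paste an image")
#   if result.image_data is not None:
#       st.image(result.image_data)                  # preview the pasted image
#       result.image_data.save("pasted.png")         # a PIL image, so it can be saved directly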
|
|
|
pypdf2==3.0.1

# Read PDFs: reader = PdfReader("file.pdf"), text = reader.pages[0].extract_text(), num_pages = len(reader.pages)

# Extract text: for page in PdfReader("doc.pdf").pages: st.write(page.extract_text())

# Parse PDF: pdf = PdfReader(open("file.pdf", "rb")), st.write(pdf.metadata), page_count = len(pdf.pages)
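
# A minimal sketch, assuming a local "file.pdf" exists (the import name is PyPDF2, not pypdf2):
#
#   from PyPDF2 import PdfReader
#
#   reader = PdfReader("file.pdf")
#   print(len(reader.pages), "pages")                # page count
#   print(reader.metadata)                           # document info, may be None
#   for page in reader.pages:
#       print((page.extract_text() or "")[:200])     # text may be empty for image-only pages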
|
|
|
anthropic==0.34.2

# AI responses: client = anthropic.Anthropic(api_key="key"), resp = client.messages.create(model="claude-3-5-sonnet-20240620", max_tokens=1024, messages=[...])

# Claude chat: msg = client.messages.create(model="claude-3-sonnet-20240229", max_tokens=1000, messages=[{"role": "user", "content": "Hi"}])

# Legacy completions: client.completions.create(model="claude-2.1", prompt=f"{anthropic.HUMAN_PROMPT} Hello{anthropic.AI_PROMPT}", max_tokens_to_sample=512)
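
# A minimal sketch of the Messages API (model id and env var name are examples; a valid key is required):
#
#   import os
#   import anthropic
#
#   client = anthropic.Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
#   msg = client.messages.create(
#       model="claude-3-5-sonnet-20240620",
#       max_tokens=1024,
#       messages=[{"role": "user", "content": "Say hello in one sentence."}],
#   )
#   print(msg.content[0].text)                       # the reply arrives as a list of content blocks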
|
|
|
openai==1.45.0

# GPT queries: client = openai.OpenAI(api_key="key"), resp = client.chat.completions.create(model="gpt-4", messages=[...])

# Generate text: completion = client.completions.create(model="gpt-3.5-turbo-instruct", prompt="Write a story", max_tokens=100)

# AI chat: chat = client.chat.completions.create(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello"}])
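
# A minimal sketch of the 1.x client (model id and env var name are examples; a valid key is required):
#
#   import os
#   from openai import OpenAI
#
#   client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
#   chat = client.chat.completions.create(
#       model="gpt-4o-mini",
#       messages=[{"role": "user", "content": "Hello"}],
#   )
#   print(chat.choices[0].message.content)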
|
|
|
gradio-client==1.3.0

# Call Gradio APIs: client = Client("user/repo"), result = client.predict("input", api_name="/predict")

# Remote inference: prediction = Client("awacke1/Arxiv-Paper-Search").predict("query", 10, "search", "model")

# Fetch results: resp = client.predict(param1, param2, api_name="/endpoint"), st.write(resp)
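
# A minimal sketch (Space name and api_name are placeholders; positional args must match the endpoint):
#
#   from gradio_client import Client
#
#   client = Client("user/space-name")               # hypothetical Hugging Face Space
#   result = client.predict("some input",            # arguments for the chosen endpoint
#                           api_name="/predict")
#   print(result)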
|
|
|
python-dotenv==1.0.1

# Load env vars: load_dotenv(), api_key = os.getenv("API_KEY"), st.write(os.getenv("SECRET"))

# Config from .env: from dotenv import load_dotenv; load_dotenv(); db_url = os.getenv("DB_URL")

# Secure secrets: load_dotenv(".env"), token = os.getenv("TOKEN"), client = SomeClient(token)
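
# A minimal sketch, assuming a local .env file with a line such as API_KEY=...:
#
#   import os
#   from dotenv import load_dotenv
#
#   load_dotenv()                                    # read .env into the process environment
#   api_key = os.getenv("API_KEY")                   # None if the variable is missing
#   if api_key is None:
#       raise RuntimeError("API_KEY is not set")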
|
|
|
streamlit-marquee==0.1.0

# Scrolling text: streamlit_marquee(content="Hello", background="#000", color="#FFF", fontSize="20px")

# Dynamic marquee: streamlit_marquee(content="News", animationDuration="10s", width="100%")

# Styled ticker: streamlit_marquee(content="Update", background="#1E1E1E", lineHeight="30px")
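
# A minimal sketch inside a Streamlit app (style kwargs are assumed to follow the component's camelCase names):
#
#   from streamlit_marquee import streamlit_marquee
#
#   streamlit_marquee(
#       content="Breaking: demo ticker text",
#       background="#1E1E1E",
#       color="#FFFFFF",
#       fontSize="20px",
#       lineHeight="30px",
#       animationDuration="10s",
#       width="100%",
#   )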