{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": { "provenance": [] },
    "kernelspec": { "name": "python3", "display_name": "Python 3" },
    "language_info": { "name": "python" }
  },
  "cells": [
    {
      "cell_type": "markdown",
      "source": [ "# Install Dependencies" ],
      "metadata": { "id": "39AMoCOa1ckc" }
    },
    {
      "metadata": { "id": "VoHxuLPu7s37" },
      "cell_type": "code",
      "source": [
        "! wget -q https://github.com/protocolbuffers/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip\n",
        "! unzip -o protoc-3.19.0-linux-x86_64.zip -d /usr/local/"
      ],
      "outputs": [],
      "execution_count": null
    },
    {
      "cell_type": "markdown",
      "source": [ "## Install LiteRT Pipeline" ],
      "metadata": { "id": "qGAaAKzYK5ei" }
    },
    {
      "cell_type": "code",
      "source": [
        "# Use %pip (not !pip) so the package installs into this kernel's environment.\n",
        "%pip install git+https://github.com/google-ai-edge/ai-edge-apis.git#subdirectory=litert_tools"
      ],
      "metadata": { "id": "43tAeO0AZ7zp" },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "source": [ "# Create Pipeline from model file" ],
      "metadata": { "id": "K5okZCTgYpUd" }
    },
    {
      "cell_type": "code",
      "source": [
        "from litert_tools.pipeline import pipeline\n",
        "runner = pipeline.load(\"litert-community/Hammer2.1-1.5b\", \"Hammer2.1-1.5b_seq128_q8_ekv1280.task\")"
      ],
      "metadata": { "id": "3t47HAG2tvc3" },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "source": [ "# Generate text from model" ],
      "metadata": { "id": "dASKx_JtYXwe" }
    },
    {
      "cell_type": "code",
      "source": [
        "# Disclaimer: Model performance demonstrated with the Python API in this notebook is not representative of performance on a local device.\n",
        "prompt = \"What is the capital of France?\"\n",
        "output = runner.generate(prompt, max_decode_steps=None)\n",
        "# Display the result via the cell's rich repr (shows nothing if generate() returned None).\n",
        "output"
      ],
      "metadata": { "id": "wT9BIiATkjzL" },
      "execution_count": null,
      "outputs": []
    }
  ]
}