DevBhojani committed on
Commit 68becce (verified)
Parent: 9cd15ff

Upload finalProject.ipynb

Files changed (1)
  1. finalProject.ipynb +301 -0
finalProject.ipynb ADDED
@@ -0,0 +1,301 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "id": "f8489e29",
7
+ "metadata": {},
8
+ "outputs": [
9
+ {
10
+ "name": "stderr",
11
+ "output_type": "stream",
12
+ "text": [
13
+ "C:\\Users\\Dev\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
14
+ " from .autonotebook import tqdm as notebook_tqdm\n",
15
+ "C:\\Users\\Dev\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\sklearn\\base.py:442: InconsistentVersionWarning: Trying to unpickle estimator DecisionTreeClassifier from version 1.6.1 when using version 1.7.1. This might lead to breaking code or invalid results. Use at your own risk. For more info please refer to:\n",
16
+ "https://scikit-learn.org/stable/model_persistence.html#security-maintainability-limitations\n",
17
+ " warnings.warn(\n",
18
+ "C:\\Users\\Dev\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\sklearn\\base.py:442: InconsistentVersionWarning: Trying to unpickle estimator RandomForestClassifier from version 1.6.1 when using version 1.7.1. This might lead to breaking code or invalid results. Use at your own risk. For more info please refer to:\n",
19
+ "https://scikit-learn.org/stable/model_persistence.html#security-maintainability-limitations\n",
20
+ " warnings.warn(\n"
21
+ ]
22
+ }
23
+ ],
24
+ "source": [
25
+ "from huggingface_hub import hf_hub_download\n",
26
+ "import joblib\n",
27
+ "\n",
28
+ "repo_id = \"DevBhojani/Classification-SamsumDataset\"\n",
29
+ "model_filename = \"random_forest_classifier_model.joblib\"\n",
30
+ "\n",
31
+ "model_path = hf_hub_download(repo_id=repo_id, filename=model_filename)\n",
32
+ "loaded_classifier_model = joblib.load(model_path)"
33
+ ]
34
+ },
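Note on the InconsistentVersionWarning in the output above: the classifier was pickled with scikit-learn 1.6.1 but is being unpickled under 1.7.1, so loading may break or give inconsistent results in future releases. A minimal sketch of a guard, assuming 1.6.1 was indeed the training version (taken from the warning text), is to pin or at least check the running version before loading:

# Sketch: guard against loading a model pickled under a different scikit-learn version.
# EXPECTED_SKLEARN is an assumption based on the warning shown in the cell output.
import sklearn
import joblib
from huggingface_hub import hf_hub_download

EXPECTED_SKLEARN = "1.6.1"

if sklearn.__version__ != EXPECTED_SKLEARN:
    print(f"Warning: running scikit-learn {sklearn.__version__}, "
          f"model was saved with {EXPECTED_SKLEARN}; results may differ.")
    # Alternatively, pin the environment: pip install scikit-learn==1.6.1

model_path = hf_hub_download(
    repo_id="DevBhojani/Classification-SamsumDataset",
    filename="random_forest_classifier_model.joblib",
)
loaded_classifier_model = joblib.load(model_path)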
35
+ {
36
+ "cell_type": "code",
37
+ "execution_count": 2,
38
+ "id": "f5326e44",
39
+ "metadata": {},
40
+ "outputs": [
41
+ {
42
+ "name": "stderr",
43
+ "output_type": "stream",
44
+ "text": [
45
+ "C:\\Users\\Dev\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\sklearn\\base.py:442: InconsistentVersionWarning: Trying to unpickle estimator TfidfTransformer from version 1.6.1 when using version 1.7.1. This might lead to breaking code or invalid results. Use at your own risk. For more info please refer to:\n",
46
+ "https://scikit-learn.org/stable/model_persistence.html#security-maintainability-limitations\n",
47
+ " warnings.warn(\n",
48
+ "C:\\Users\\Dev\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\sklearn\\base.py:442: InconsistentVersionWarning: Trying to unpickle estimator TfidfVectorizer from version 1.6.1 when using version 1.7.1. This might lead to breaking code or invalid results. Use at your own risk. For more info please refer to:\n",
49
+ "https://scikit-learn.org/stable/model_persistence.html#security-maintainability-limitations\n",
50
+ " warnings.warn(\n"
51
+ ]
52
+ }
53
+ ],
54
+ "source": [
55
+ "import joblib\n",
56
+ "from sklearn.feature_extraction.text import TfidfVectorizer\n",
57
+ "\n",
58
+ "repo_id = \"DevBhojani/Classification-SamsumDataset\"\n",
59
+ "model_filename = \"random_forest_classifier_model.joblib\"\n",
60
+ "vectorizer_filename = \"tfidf_vectorizer.joblib\"\n",
61
+ "\n",
62
+ "model_path = hf_hub_download(repo_id=repo_id, filename=model_filename)\n",
63
+ "vectorizer_path = hf_hub_download(repo_id=repo_id, filename=vectorizer_filename)\n",
64
+ "\n",
65
+ "loaded_classifier_model = joblib.load(model_path)\n",
66
+ "loaded_tfidf_vectorizer = joblib.load(vectorizer_path)"
67
+ ]
68
+ },
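The second cell re-downloads and re-loads the classifier that the first cell already loaded, and imports TfidfVectorizer even though the fitted vectorizer is deserialized from the joblib file rather than constructed in place. A consolidated sketch of the loading step, using the same repo and filenames as the cells above, would be:

# Sketch: load the classifier and the fitted TF-IDF vectorizer in one pass,
# avoiding the duplicate download/load of the classifier.
import joblib
from huggingface_hub import hf_hub_download

repo_id = "DevBhojani/Classification-SamsumDataset"

model_path = hf_hub_download(repo_id=repo_id, filename="random_forest_classifier_model.joblib")
vectorizer_path = hf_hub_download(repo_id=repo_id, filename="tfidf_vectorizer.joblib")

loaded_classifier_model = joblib.load(model_path)
loaded_tfidf_vectorizer = joblib.load(vectorizer_path)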
69
+ {
70
+ "cell_type": "code",
71
+ "execution_count": 3,
72
+ "id": "e47f08e6",
73
+ "metadata": {},
74
+ "outputs": [
75
+ {
76
+ "ename": "ModuleNotFoundError",
77
+ "evalue": "Could not import module 'pipeline'. Are this object's requirements defined correctly?",
78
+ "output_type": "error",
79
+ "traceback": [
80
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
81
+ "\u001b[1;31mRuntimeError\u001b[0m Traceback (most recent call last)",
82
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\transformers\\utils\\import_utils.py:2154\u001b[0m, in \u001b[0;36m_LazyModule.__getattr__\u001b[1;34m(self, name)\u001b[0m\n\u001b[0;32m 2153\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 2154\u001b[0m module \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_get_module\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_class_to_module\u001b[49m\u001b[43m[\u001b[49m\u001b[43mname\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 2155\u001b[0m value \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mgetattr\u001b[39m(module, name)\n",
83
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\transformers\\utils\\import_utils.py:2184\u001b[0m, in \u001b[0;36m_LazyModule._get_module\u001b[1;34m(self, module_name)\u001b[0m\n\u001b[0;32m 2183\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m-> 2184\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n",
84
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\transformers\\utils\\import_utils.py:2182\u001b[0m, in \u001b[0;36m_LazyModule._get_module\u001b[1;34m(self, module_name)\u001b[0m\n\u001b[0;32m 2181\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m-> 2182\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mimportlib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mimport_module\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m.\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mmodule_name\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;18;43m__name__\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 2183\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n",
85
+ "File \u001b[1;32mC:\\Program Files\\WindowsApps\\PythonSoftwareFoundation.Python.3.11_3.11.2544.0_x64__qbz5n2kfra8p0\\Lib\\importlib\\__init__.py:126\u001b[0m, in \u001b[0;36mimport_module\u001b[1;34m(name, package)\u001b[0m\n\u001b[0;32m 125\u001b[0m level \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m--> 126\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_bootstrap\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_gcd_import\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m[\u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpackage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m)\u001b[49m\n",
86
+ "File \u001b[1;32m<frozen importlib._bootstrap>:1204\u001b[0m, in \u001b[0;36m_gcd_import\u001b[1;34m(name, package, level)\u001b[0m\n",
87
+ "File \u001b[1;32m<frozen importlib._bootstrap>:1176\u001b[0m, in \u001b[0;36m_find_and_load\u001b[1;34m(name, import_)\u001b[0m\n",
88
+ "File \u001b[1;32m<frozen importlib._bootstrap>:1147\u001b[0m, in \u001b[0;36m_find_and_load_unlocked\u001b[1;34m(name, import_)\u001b[0m\n",
89
+ "File \u001b[1;32m<frozen importlib._bootstrap>:690\u001b[0m, in \u001b[0;36m_load_unlocked\u001b[1;34m(spec)\u001b[0m\n",
90
+ "File \u001b[1;32m<frozen importlib._bootstrap_external>:940\u001b[0m, in \u001b[0;36mexec_module\u001b[1;34m(self, module)\u001b[0m\n",
91
+ "File \u001b[1;32m<frozen importlib._bootstrap>:241\u001b[0m, in \u001b[0;36m_call_with_frames_removed\u001b[1;34m(f, *args, **kwds)\u001b[0m\n",
92
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\transformers\\pipelines\\__init__.py:26\u001b[0m\n\u001b[0;32m 25\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mfeature_extraction_utils\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m PreTrainedFeatureExtractor\n\u001b[1;32m---> 26\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mimage_processing_utils\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m BaseImageProcessor\n\u001b[0;32m 27\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mmodels\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mauto\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mconfiguration_auto\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m AutoConfig\n",
93
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\transformers\\image_processing_utils.py:22\u001b[0m\n\u001b[0;32m 21\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mimage_processing_base\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m BatchFeature, ImageProcessingMixin\n\u001b[1;32m---> 22\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mimage_transforms\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m center_crop, normalize, rescale\n\u001b[0;32m 23\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mimage_utils\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m ChannelDimension, get_image_size\n",
94
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\transformers\\image_transforms.py:22\u001b[0m\n\u001b[0;32m 20\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mnumpy\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mnp\u001b[39;00m\n\u001b[1;32m---> 22\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mimage_utils\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m (\n\u001b[0;32m 23\u001b[0m ChannelDimension,\n\u001b[0;32m 24\u001b[0m ImageInput,\n\u001b[0;32m 25\u001b[0m get_channel_dimension_axis,\n\u001b[0;32m 26\u001b[0m get_image_size,\n\u001b[0;32m 27\u001b[0m infer_channel_dimension_format,\n\u001b[0;32m 28\u001b[0m )\n\u001b[0;32m 29\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mutils\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m ExplicitEnum, TensorType, is_jax_tensor, is_tf_tensor, is_torch_tensor\n",
95
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\transformers\\image_utils.py:59\u001b[0m\n\u001b[0;32m 58\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m is_torchvision_available():\n\u001b[1;32m---> 59\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtorchvision\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mtransforms\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m InterpolationMode\n\u001b[0;32m 61\u001b[0m pil_torch_interpolation_mapping \u001b[38;5;241m=\u001b[39m {\n\u001b[0;32m 62\u001b[0m PILImageResampling\u001b[38;5;241m.\u001b[39mNEAREST: InterpolationMode\u001b[38;5;241m.\u001b[39mNEAREST_EXACT,\n\u001b[0;32m 63\u001b[0m PILImageResampling\u001b[38;5;241m.\u001b[39mBOX: InterpolationMode\u001b[38;5;241m.\u001b[39mBOX,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 67\u001b[0m PILImageResampling\u001b[38;5;241m.\u001b[39mLANCZOS: InterpolationMode\u001b[38;5;241m.\u001b[39mLANCZOS,\n\u001b[0;32m 68\u001b[0m }\n",
96
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\torchvision\\__init__.py:6\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtorch\u001b[39;00m\n\u001b[1;32m----> 6\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtorchvision\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m _meta_registrations, datasets, io, models, ops, transforms, utils\n\u001b[0;32m 8\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mextension\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m _HAS_OPS\n",
97
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\torchvision\\_meta_registrations.py:163\u001b[0m\n\u001b[0;32m 160\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m grad\u001b[38;5;241m.\u001b[39mnew_empty((batch_size, channels, height, width))\n\u001b[1;32m--> 163\u001b[0m \u001b[38;5;129;43m@torch\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_custom_ops\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mimpl_abstract\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtorchvision::nms\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 164\u001b[0m \u001b[38;5;28;43;01mdef\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;21;43mmeta_nms\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mdets\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mscores\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43miou_threshold\u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n\u001b[0;32m 165\u001b[0m \u001b[43m \u001b[49m\u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_check\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdets\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdim\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m==\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m2\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43mf\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mboxes should be a 2d tensor, got \u001b[39;49m\u001b[38;5;132;43;01m{\u001b[39;49;00m\u001b[43mdets\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdim\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;132;43;01m}\u001b[39;49;00m\u001b[38;5;124;43mD\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n",
98
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\torch\\library.py:1023\u001b[0m, in \u001b[0;36mregister_fake.<locals>.register\u001b[1;34m(func)\u001b[0m\n\u001b[0;32m 1022\u001b[0m use_lib \u001b[38;5;241m=\u001b[39m lib\n\u001b[1;32m-> 1023\u001b[0m \u001b[43muse_lib\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_register_fake\u001b[49m\u001b[43m(\u001b[49m\u001b[43mop_name\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m_stacklevel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstacklevel\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m+\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 1024\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m func\n",
99
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\torch\\library.py:214\u001b[0m, in \u001b[0;36mLibrary._register_fake\u001b[1;34m(self, op_name, fn, _stacklevel)\u001b[0m\n\u001b[0;32m 212\u001b[0m func_to_register \u001b[38;5;241m=\u001b[39m fn\n\u001b[1;32m--> 214\u001b[0m handle \u001b[38;5;241m=\u001b[39m \u001b[43mentry\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfake_impl\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mregister\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfunc_to_register\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43msource\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 215\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_registration_handles\u001b[38;5;241m.\u001b[39mappend(handle)\n",
100
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\torch\\_library\\fake_impl.py:31\u001b[0m, in \u001b[0;36mFakeImplHolder.register\u001b[1;34m(self, func, source)\u001b[0m\n\u001b[0;32m 26\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 27\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mregister_fake(...): the operator \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqualname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 28\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124malready has an fake impl registered at \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 29\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mkernel\u001b[38;5;241m.\u001b[39msource\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 30\u001b[0m )\n\u001b[1;32m---> 31\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_C\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_dispatch_has_kernel_for_dispatch_key\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mqualname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mMeta\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m:\n\u001b[0;32m 32\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[0;32m 33\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mregister_fake(...): the operator \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqualname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 34\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124malready has an DispatchKey::Meta implementation via a \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 37\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mregister_fake.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 38\u001b[0m )\n",
101
+ "\u001b[1;31mRuntimeError\u001b[0m: operator torchvision::nms does not exist",
102
+ "\nThe above exception was the direct cause of the following exception:\n",
103
+ "\u001b[1;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
104
+ "Cell \u001b[1;32mIn[3], line 2\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mgradio\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mgr\u001b[39;00m\n\u001b[1;32m----> 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtransformers\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m pipeline, AutoTokenizer\n\u001b[0;32m 3\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mre\u001b[39;00m\n\u001b[0;32m 4\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mcontractions\u001b[39;00m\n",
105
+ "File \u001b[1;32m~\\AppData\\Local\\Packages\\PythonSoftwareFoundation.Python.3.11_qbz5n2kfra8p0\\LocalCache\\local-packages\\Python311\\site-packages\\transformers\\utils\\import_utils.py:2157\u001b[0m, in \u001b[0;36m_LazyModule.__getattr__\u001b[1;34m(self, name)\u001b[0m\n\u001b[0;32m 2155\u001b[0m value \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mgetattr\u001b[39m(module, name)\n\u001b[0;32m 2156\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mModuleNotFoundError\u001b[39;00m, \u001b[38;5;167;01mRuntimeError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m-> 2157\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mModuleNotFoundError\u001b[39;00m(\n\u001b[0;32m 2158\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCould not import module \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m. Are this object\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124ms requirements defined correctly?\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 2159\u001b[0m ) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01me\u001b[39;00m\n\u001b[0;32m 2161\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m name \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_modules:\n\u001b[0;32m 2162\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n",
106
+ "\u001b[1;31mModuleNotFoundError\u001b[0m: Could not import module 'pipeline'. Are this object's requirements defined correctly?"
107
+ ]
108
+ }
109
+ ],
110
+ "source": [
111
+ "import gradio as gr\n",
112
+ "from transformers import pipeline, AutoTokenizer\n",
113
+ "import re\n",
114
+ "import contractions\n",
115
+ "# Assuming loaded_classifier_model and loaded_tfidf_vectorizer are already loaded from the previous cell\n",
116
+ "\n",
117
+ "def remove_html_tags(text):\n",
118
+ " pattern = r'[^a-zA-Z0-9\\s]'\n",
119
+ " text = re.sub(pattern, '', str(text))\n",
120
+ " return text\n",
121
+ "\n",
122
+ "def remove_url(text):\n",
123
+ " pattern = re.compile(r'https?://\\S+|www\\.\\S+')\n",
124
+ " return pattern.sub(r'', str(text))\n",
125
+ "\n",
126
+ "def remove_emojis(text):\n",
127
+ " emoji_pattern = re.compile(\n",
128
+ " \"[\"\n",
129
+ " u\"\\U0001F600-\\U0001F64F\" # emoticons\n",
130
+ " u\"\\U0001F300-\\U0001F5FF\" # symbols & pictographs\n",
131
+ " u\"\\U0001F680-\\U0001F6FF\" # transport & map symbols\n",
132
+ " u\"\\U0001F1E0-\\U0001F1FF\" # flags\n",
133
+ " u\"\\U00002700-\\U000027BF\" # miscellaneous symbols\n",
134
+ " u\"\\U0001F900-\\U0001F9FF\" # supplemental symbols\n",
135
+ " u\"\\U00002600-\\U000026FF\" # weather & other symbols\n",
136
+ " u\"\\U0001FA70-\\U0001FAFF\" # extended symbols\n",
137
+ " \"]+\",\n",
138
+ " flags=re.UNICODE\n",
139
+ " )\n",
140
+ " return emoji_pattern.sub(r'', str(text))\n",
141
+ "\n",
142
+ "def expand_contractions(text):\n",
143
+ " return contractions.fix(text)\n",
144
+ "\n",
145
+ "def remove_special_and_numbers(text):\n",
146
+ " return re.sub(r'[^a-zA-Z\\s]', '', str(text))\n",
147
+ "\n",
148
+ "def clean_text(text):\n",
149
+ " text = remove_url(text)\n",
150
+ " text = remove_emojis(text)\n",
151
+ " text = expand_contractions(text)\n",
152
+ " text = text.lower()\n",
153
+ " return text\n",
154
+ "\n",
155
+ "summarizer = pipeline(\"summarization\", model=\"luisotorres/bart-finetuned-samsum\")\n",
156
+ "# summarizer2 = pipeline(\"summarization\", model=\"knkarthick/MEETING_SUMMARY\")\n",
157
+ "tokenizer = AutoTokenizer.from_pretrained(\"luisotorres/bart-finetuned-samsum\")\n",
158
+ "\n",
159
+ "def split_into_chunks(conversation, n=15):\n",
160
+ " lines = conversation.strip().split('\\n')\n",
161
+ " chunk_size = max(1, len(lines) // n)\n",
162
+ " return ['\\n'.join(lines[i:i+chunk_size]) for i in range(0, len(lines), chunk_size)]\n",
163
+ "\n",
164
+ "def truncate_chunk(text, max_tokens=1024):\n",
165
+ " tokens = tokenizer.encode(text, truncation=True, max_length=max_tokens)\n",
166
+ " return tokenizer.decode(tokens, skip_special_tokens=True)\n",
167
+ "\n",
168
+ "def summarize_chunks(chunks, model):\n",
169
+ " summaries = []\n",
170
+ " for chunk in chunks:\n",
171
+ " chunk = chunk.strip()\n",
172
+ " if not chunk:\n",
173
+ " continue\n",
174
+ " try:\n",
175
+ " truncated_chunk = truncate_chunk(chunk)\n",
176
+ " summary = model(truncated_chunk, max_length=1024, min_length=20, do_sample=False)[0]['summary_text']\n",
177
+ " summaries.append(summary)\n",
178
+ " except Exception as e:\n",
179
+ " print(f\"Error summarizing chunk: {e}\")\n",
180
+ " return summaries\n",
181
+ "\n",
182
+ "def combine_summaries(summaries):\n",
183
+ " return ' '.join(summaries)\n",
184
+ "\n",
185
+ "def summarize_dialogue(conversation, model):\n",
186
+ " chunks = split_into_chunks(conversation, n=1)\n",
187
+ " summaries = summarize_chunks(chunks, model)\n",
188
+ " final_summary = combine_summaries(summaries)\n",
189
+ " return final_summary\n",
190
+ "\n",
191
+ "def analyze_meeting_transcript(user_input):\n",
192
+ " if not user_input.strip():\n",
193
+ " return \"Please enter some text to summarize.\", \"\"\n",
194
+ "\n",
195
+ " cleaned_input = clean_text(user_input)\n",
196
+ " summary1 = summarize_dialogue(cleaned_input, summarizer)\n",
197
+ "\n",
198
+ " # Use the loaded vectorizer to transform the input\n",
199
+ " cleaned_input_vectorized = loaded_tfidf_vectorizer.transform([cleaned_input])\n",
200
+ "\n",
201
+ " intent_classification = loaded_classifier_model.predict(cleaned_input_vectorized)[0]\n",
202
+ " # print(intent_classification)\n",
203
+ " # print(cleaned_input_vectorized)\n",
204
+ " # intent_classification = \"Transactional Inquiry & Information Exchange\"\n",
205
+ "\n",
206
+ " # Format the intent classification output\n",
207
+ " formatted_intent = intent_classification.replace(\"__label__\", \"\").replace(\"_\", \" \")\n",
208
+ "\n",
209
+ "\n",
210
+ " return summary1, formatted_intent\n",
211
+ "\n",
212
+ "interface = gr.Interface(\n",
213
+ " fn=analyze_meeting_transcript,\n",
214
+ " inputs=gr.Textbox(label=\"Enter dialogue here\", lines=12, placeholder=\"Paste your meeting transcript...\"),\n",
215
+ " outputs=[\n",
216
+ " gr.Textbox(label=\"Summary (Luis Torres BART)\"),\n",
217
+ " # gr.Textbox(label=\"Summary 2 (KN Karthick MEETING_SUMMARY)\"),\n",
218
+ " gr.Textbox(label=\"Intent Classification\") # Removed \"Placeholder\"\n",
219
+ " ],\n",
220
+ " title=\"Meeting Transcript Analyzer\",\n",
221
+ " description=\"Summarizes meeting dialogues and classifies the intent.\",\n",
222
+ " allow_flagging=\"never\",\n",
223
+ " examples=[\n",
224
+ " [\n",
225
+ " '''\n",
226
+ "Amanda: guess what!\n",
227
+ "Chris: hey ;) ur pregnant!\n",
228
+ "Amanda: noo ;) but close enough! I'm so proud of myself! Remember I go to these dancing classes with Michael?\n",
229
+ "Chris: Yeah?\n",
230
+ "Amanda: So we went yesterday and the instructor needed a partner to show the steps we had so far\n",
231
+ "Chris: so there's only one guy teaching you? without a female partner?\n",
232
+ "Amanda: Well, this time he was alone, BUT THAT'S NOT THE POINT! Listen!\n",
233
+ "Chris: yeah, sorry :D tell me!\n",
234
+ "Amanda: So he needed a partner and noone really knew the steps like perfectly\n",
235
+ "Amanda: and obviously noone wanted to be mocked\n",
236
+ "Amanda: so I thought, aaaah :D\n",
237
+ "Chris: u volunteered? really? you??\n",
238
+ "Amanda: yeah!\n",
239
+ "Chris: whooa! that's so great! #therapy #worthit :D\n",
240
+ "Amanda: yeah i know :D maybe one day i'll actually stop being so shy\n",
241
+ "Chris: that's definitely the first step! :D congrats!\n",
242
+ "Amanda: tx ^_^\n",
243
+ "Chris: what dance was it?\n",
244
+ "Amanda: English waltz\n",
245
+ "Chris: isn't it, like, SO difficult?\n",
246
+ "Amanda: yeah it is! but everyone said I looked like a pro :D\n",
247
+ "Chris: Well done!!\n",
248
+ "'''\n",
249
+ " ],\n",
250
+ " [\"I have some exciting news to share!\"],\n",
251
+ " [\n",
252
+ " '''\n",
253
+ "Beryl: Hello guys! How are you doing? We've lost contact for a few months now. Hope you are well.\n",
254
+ "Anton: A happy hello to you Beryl! Great to hear from you. We are fine, thanks. And yourself?\n",
255
+ "Beryl: I'm very well indeed. Thank you. Any changes in your setup?\n",
256
+ "Anton: Not really. SOS. Same Old Soup ;) But we are happy for that.\n",
257
+ "Beryl: Are you still running your lovely airbnb?\n",
258
+ "Anton: Oh yes, we are. We had a few months off during summer, our summer, but now bookings start flowing in. Well... Are you planning to visit us? You two are always welcome!\n",
259
+ "Beryl: You caught me here. I'm vaguely considering going down to Onrus again, most likely in January. What does it look like with vacancies then?\n",
260
+ "Anton: Perfect! Just give me your dates and I'll keep it booked for you.\n",
261
+ "Beryl: Would you prefer me to do it via airbnb website or just like this directly with you?\n",
262
+ "Anton: I think it'll be more advantageous for both of us to do it directly. Do you know exactly when you'll be coming?\n",
263
+ "Beryl: Not so much. Can I get back to you in 2, 3 days' time?\n",
264
+ "Anton: ASAP really. As I say we've been receiving bookings daily now.\n",
265
+ "Beryl: Well, no big deal. I'll be staying in Cape Town for a longer time and am quite flexible in my dates.\n",
266
+ "Anton: Will you be coming with Tino, if I may ask?\n",
267
+ "Beryl: No. I am single again. Hurray! So pls make it single occupancy any week in January, Anton.\n",
268
+ "Anton: Great! 4th till 12th?\n",
269
+ "Beryl: Very good. I'll call you beforehand from Cape Town. Greetings to you both!\n",
270
+ "Anton: Take care!'''\n",
271
+ " ],\n",
272
+ " ]\n",
273
+ ")\n",
274
+ "\n",
275
+ "if __name__ == \"__main__\":\n",
276
+ " interface.launch(debug=True, share=True)"
277
+ ]
278
+ }
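The ModuleNotFoundError in the cell above is not about the transformers pipeline itself: the traceback bottoms out in "RuntimeError: operator torchvision::nms does not exist", which usually indicates that the installed torchvision wheel was built for a different torch release than the one installed. One common remedy, offered here as an assumption rather than a verified fix for this environment, is to reinstall torch and torchvision together so pip resolves a matching pair (pip install --upgrade --force-reinstall torch torchvision). A small diagnostic sketch:

# Sketch: check whether torch and torchvision import as a matching pair before
# importing transformers.pipeline (which pulls torchvision in when it is present).
import torch

print("torch:", torch.__version__)

try:
    import torchvision
    print("torchvision:", torchvision.__version__)
except Exception as e:
    print("torchvision failed to import:", e)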
279
+ ],
280
+ "metadata": {
281
+ "kernelspec": {
282
+ "display_name": "Python 3",
283
+ "language": "python",
284
+ "name": "python3"
285
+ },
286
+ "language_info": {
287
+ "codemirror_mode": {
288
+ "name": "ipython",
289
+ "version": 3
290
+ },
291
+ "file_extension": ".py",
292
+ "mimetype": "text/x-python",
293
+ "name": "python",
294
+ "nbconvert_exporter": "python",
295
+ "pygments_lexer": "ipython3",
296
+ "version": "3.11.9"
297
+ }
298
+ },
299
+ "nbformat": 4,
300
+ "nbformat_minor": 5
301
+ }
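For reference, the summarization path in the last cell splits the cleaned dialogue into chunks, truncates each chunk to the BART model's 1024-token input limit, summarizes the chunks, and joins the partial summaries. Note that summarize_dialogue calls split_into_chunks(conversation, n=1), so chunk_size equals the full line count and the entire dialogue is passed as a single truncated chunk. (Incidentally, remove_html_tags actually strips all non-alphanumeric characters rather than HTML tags, and neither it nor remove_special_and_numbers is called from clean_text.) A minimal, self-contained sketch of the chunking helper and its behaviour for different n:

# Sketch: the chunking helper from the notebook, reproduced standalone so its
# behaviour can be checked without loading any models.
def split_into_chunks(conversation, n=15):
    lines = conversation.strip().split('\n')
    chunk_size = max(1, len(lines) // n)
    return ['\n'.join(lines[i:i + chunk_size]) for i in range(0, len(lines), chunk_size)]

dialogue = "\n".join(f"Speaker {i}: line {i}" for i in range(6))

print(len(split_into_chunks(dialogue, n=1)))   # 1 chunk: the whole dialogue
print(len(split_into_chunks(dialogue, n=3)))   # 3 chunks of 2 lines each
print(len(split_into_chunks(dialogue, n=15)))  # chunk_size floors to 1 -> 6 chunks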