Yago Bolivar committed on
Commit ca88013 · 1 Parent(s): b218046

feat: add accessibility check for GPT4All library and model initialization

- utilities/check_gpt4all.py  +70 -0
utilities/check_gpt4all.py
ADDED
@@ -0,0 +1,70 @@
import sys


def check_gpt4all_accessibility(model_name="/Users/yagoairm2/Library/Application Support/nomic.ai/GPT4All/Meta-Llama-3-8B-Instruct.Q4_0.gguf"):
    """
    Checks that the gpt4all library is installed and can load a model.
    """
    print("--- GPT4All Accessibility Check ---")
    try:
        print("Step 1: Attempting to import GPT4All...")
        from gpt4all import GPT4All
        print("SUCCESS: GPT4All library imported successfully.\n")
    except ImportError:
        print("ERROR: GPT4All library not found.")
        print("Please install it by running: pip install gpt4all")
        print("-----------------------------------")
        return False
    except Exception as e:
        print(f"ERROR: An unexpected error occurred during import: {e}")
        print("-----------------------------------")
        return False

    try:
        print(f"Step 2: Attempting to initialize GPT4All model: '{model_name}'...")
        print("This might take a moment if the model needs to be downloaded.")
        # A custom model_path can be passed if models are stored elsewhere:
        # model = GPT4All(model_name, model_path="/path/to/your/models/")
        model = GPT4All(model_name, allow_download=True)  # allow_download defaults to True
        print(f"SUCCESS: GPT4All model '{model_name}' initialized successfully.")
        print("Model object:", model)

        # Optional: perform a quick generation test.
        print("\nStep 3: Performing a quick generation test...")
        try:
            # Newer gpt4all versions expose generate() directly; a chat session
            # context manager is used here so chat-style models also work.
            if hasattr(model, "generate"):
                with model.chat_session():
                    response = model.generate("Hello, world!", max_tokens=10, temp=0.7)
                    print(f"Test generation successful. Response (first few tokens): {response[:50]}...")
            else:
                print("Model does not expose a 'generate' method in this context. Skipping generation test.")
            print("SUCCESS: GPT4All seems to be working correctly.")
            print("-----------------------------------")
            return True
        except Exception as e:
            print(f"ERROR: Failed during test generation with model '{model_name}': {e}")
            print("This could be due to model compatibility or an issue with the generation process.")
            print("-----------------------------------")
            # Initialization worked but generation failed, so report the check as failed.
            return False

    except Exception as e:
        print(f"ERROR: Failed to initialize GPT4All model '{model_name}': {e}")
        print("This could be due to various reasons:")
        print("  - The model name is incorrect or not available for download.")
        print("  - You don't have an internet connection to download the model.")
        print("  - Issues with model file integrity if previously downloaded.")
        print("  - Insufficient disk space or permissions.")
        print("  - Underlying C++ library issues (check the gpt4all installation).")
        print("Please check the model name and your internet connection.")
        print("You can find available models at https://gpt4all.io/index.html")
        print("-----------------------------------")
        return False


if __name__ == "__main__":
    if check_gpt4all_accessibility():
        print("\nGPT4All is accessible and a model was loaded successfully.")
        sys.exit(0)
    else:
        print("\nGPT4All accessibility check failed. Please review the error messages above.")
        sys.exit(1)
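A minimal usage sketch (not part of the commit), assuming the script lives at utilities/check_gpt4all.py, is invoked from the repository root, and the gpt4all package is installed; the alternative model name in the commented-out call is purely illustrative:

# Run the check from the command line:
#   python utilities/check_gpt4all.py
# or import it and call the function directly:
from utilities.check_gpt4all import check_gpt4all_accessibility

# Uses the default model path hard-coded in the script.
ok = check_gpt4all_accessibility()

# Hypothetical alternative: pass any other locally available GGUF model name.
# ok = check_gpt4all_accessibility(model_name="orca-mini-3b-gguf2-q4_0.gguf")

print("GPT4All check passed" if ok else "GPT4All check failed")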