File size: 8,594 Bytes
8beb2b1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
#!/usr/bin/env python3
"""
Atlas Intelligence Unified Bridge Fix
This script patches the unified_bridge.py file to correct integration issues
between components.
"""

import os
import sys
import shutil
import re

# Define the path to the unified bridge file that this script patches in place
UNIFIED_BRIDGE_PATH = os.path.expanduser("~/AtlasUnified/unified_bridge.py")
# Sibling path where the pristine copy is saved before any edits are made
BACKUP_PATH = os.path.expanduser("~/AtlasUnified/unified_bridge.py.bak")

def backup_original_file():
    """Copy the unified bridge file to BACKUP_PATH before patching.

    Returns:
        bool: True when the backup was written, False when the source
        file does not exist.
    """
    # Guard clause: nothing to back up if the bridge file is missing.
    if not os.path.exists(UNIFIED_BRIDGE_PATH):
        print(f"Error: Unified bridge file not found at {UNIFIED_BRIDGE_PATH}")
        return False
    print(f"Creating backup at {BACKUP_PATH}")
    # copy2 preserves file metadata (timestamps) along with the contents.
    shutil.copy2(UNIFIED_BRIDGE_PATH, BACKUP_PATH)
    return True

def fix_openmanus_integration(content):
    """Rewrite the OpenManus section of the unified bridge source.

    Args:
        content: Full text of unified_bridge.py.

    Returns:
        str: *content* with the "# Process with OpenManus if enabled"
        section replaced by code that calls ``app.llm.ask_tool`` directly.
        If the marker is not found, the text is returned unchanged.
    """
    # Match (DOTALL, non-greedy) from the OpenManus marker comment up to,
    # but not including, the next 4-whitespace-indented "# Process with"
    # section header — or to the end of the string.
    openmanus_pattern = r"# Process with OpenManus if enabled.*?(?=\s{4}# Process with|$)"
    openmanus_replacement = """# Process with OpenManus if enabled - FIXED code to use ask_tool function correctly
    if config["integrations"]["enable_openmanus"]:
        try:
            sys.path.append(config["paths"]["openmanus"])
            from app.llm import ask_tool
            
            # Using the ask_tool function directly
            openmanus_result = ask_tool(query_text)
            results["openmanus"] = {"response": openmanus_result}
            
        except Exception as e:
            logger.error(f"OpenManus processing error: {e}")
            logger.error(traceback.format_exc())
            results["openmanus"] = {"error": str(e)}
    
"""
    # Pass the replacement through a callable so re.sub inserts it
    # literally: a plain-string repl has its backslash escapes (\1, \g<..>,
    # \n, ...) interpreted, which would silently corrupt the template if
    # one is ever added to it.
    return re.sub(openmanus_pattern, lambda _m: openmanus_replacement,
                  content, flags=re.DOTALL)

def fix_quantum_vision_integration(content):
    """Fix QuantumVision integration code.

    Rewrites the "# Process with Quantum Vision if enabled" section of the
    unified bridge source (passed in as *content*) so it calls
    nlp_processor.process_text() with a spaCy pipeline, falling back first
    to openai_integration.generate_text() and finally to a simple word
    count.  Returns the patched text; if the section marker is not found,
    *content* is returned unchanged.
    """
    # Find and replace the QuantumVision integration section.
    # Matches (DOTALL, non-greedy) from the marker comment up to — but not
    # including — the 4-whitespace-indented "return results", or end of
    # string.
    quantum_pattern = r"# Process with Quantum Vision if enabled.*?(?=\s{4}return results|$)"
    quantum_replacement = """# Process with Quantum Vision if enabled - FIXED code to use the correct function
    if config["integrations"]["enable_quantum_vision"]:
        try:
            sys.path.append(config["paths"]["quantum_vision"])
            try:
                import nlp_processor
                import openai_integration
                import spacy
                
                # Load a small spaCy model if available, otherwise use simple processing
                try:
                    nlp = spacy.load("en_core_web_sm")
                except:
                    # Fallback to a simpler model
                    try:
                        nlp = spacy.blank("en")
                    except:
                        # If spaCy isn't properly installed/configured
                        raise ImportError("spaCy models not available")
                
                # Use the correct function from nlp_processor
                quantum_result = nlp_processor.process_text(nlp, query_text)
                results["quantum_vision"] = {"response": quantum_result}
                
            except (ImportError, AttributeError):
                # Fallback to direct OpenAI integration if available
                try:
                    import openai_integration
                    openai_response = openai_integration.generate_text(query_text)
                    results["quantum_vision"] = {"fallback_response": openai_response}
                except:
                    # If both approaches fail, use a simple response
                    results["quantum_vision"] = {
                        "fallback_response": f"Processed query: {query_text}",
                        "simple_analysis": {"words": len(query_text.split())}
                    }
                    
        except Exception as e:
            logger.error(f"Quantum Vision processing error: {e}")
            logger.error(traceback.format_exc())
            results["quantum_vision"] = {"error": str(e)}
    
"""
    # NOTE(review): the replacement is passed as a plain string, so re.sub
    # would interpret any backslash escapes inside it; the current template
    # happens to contain none — keep it that way.
    return re.sub(quantum_pattern, quantum_replacement, content, flags=re.DOTALL)

def fix_integration_helper_functions(content):
    """Fix the helper functions for status checks.

    Replaces the bodies of check_openmanus_status() and
    check_quantum_vision_status() in *content* (the unified bridge source)
    with versions that import the correct modules (app.llm and
    nlp_processor respectively).  Sections that do not match are left
    unchanged; the patched text is returned.
    """
    # Fix OpenManus status check: match from its def up to the next
    # "def check_quantum" (or end of string).
    openmanus_check_pattern = r"def check_openmanus_status\(\).*?(?=def check_quantum|$)"
    openmanus_check_replacement = """def check_openmanus_status() -> str:
    \"\"\"Check if OpenManus is available and working\"\"\"
    try:
        if not os.path.exists(config["paths"]["openmanus"]):
            return "unavailable (path not found)"
            
        # Try importing a key module - fixed import
        sys.path.append(config["paths"]["openmanus"])
        import app.llm
        return "available"
    except Exception as e:
        logger.error(f"Failed to check OpenManus: {e}")
        return f"error: {str(e)}"

"""

    content = re.sub(openmanus_check_pattern, openmanus_check_replacement, content, flags=re.DOTALL)
    
    # Fix QuantumVision status check: same scheme, bounded by the next
    # "def check_casibase" (or end of string).
    quantum_check_pattern = r"def check_quantum_vision_status\(\).*?(?=def check_casibase|$)"
    quantum_check_replacement = """def check_quantum_vision_status() -> str:
    \"\"\"Check if QuantumVision is available and working\"\"\"
    try:
        if not os.path.exists(config["paths"]["quantum_vision"]):
            return "unavailable (path not found)"
            
        # Try importing the module directly
        sys.path.append(config["paths"]["quantum_vision"])
        import nlp_processor
        return "available"
    except Exception as e:
        logger.error(f"Failed to check QuantumVision: {e}")
        return f"error: {str(e)}"

"""
    return re.sub(quantum_check_pattern, quantum_check_replacement, content, flags=re.DOTALL)

def create_openai_helper_function(openai_file_path=None):
    """Append a ``generate_text()`` helper to QuantumVision's openai_integration.py.

    Args:
        openai_file_path: Target file to patch.  Defaults to the
            QuantumVision copy under the user's iCloud Drive, preserving
            the original behavior for existing callers.

    The helper is only appended when the file does not already define
    ``generate_text``, so repeated runs are idempotent.
    """
    if openai_file_path is None:
        openai_file_path = os.path.expanduser(
            "~/Library/Mobile Documents/com~apple~CloudDocs/Atlas Business/QuantumVision/openai_integration.py"
        )

    # Check if the function already exists; if so, don't append a duplicate.
    if os.path.exists(openai_file_path):
        with open(openai_file_path, 'r') as file:
            content = file.read()
            if 'def generate_text(' in content:
                print("OpenAI helper function already exists.")
                return

    # open(..., 'a') creates a missing file but NOT missing directories —
    # make sure the parent directory exists first.
    parent_dir = os.path.dirname(openai_file_path)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)

    # Create or update the file with the necessary function
    with open(openai_file_path, 'a') as file:
        file.write('''

def generate_text(prompt):
    """Generate text using OpenAI API
    
    Args:
        prompt: The text prompt to send to OpenAI
        
    Returns:
        str: The generated response
    """
    try:
        import logging
        logger = logging.getLogger(__name__)
        
        import openai
        from openai import OpenAI
        
        # Initialize the OpenAI client with your API key
        client = OpenAI()
        
        logger.info(f"Sending prompt to OpenAI: {prompt[:50]}...")
        
        # Make the API call
        response = client.chat.completions.create(
            model="gpt-4o",
            messages=[
                {"role": "system", "content": "You are a helpful AI assistant."},
                {"role": "user", "content": prompt}
            ],
            max_tokens=500
        )
        
        # Extract and return the response text
        response_text = response.choices[0].message.content
        return response_text
        
    except Exception as e:
        logger.error(f"Error generating text with OpenAI: {str(e)}")
        return f"Error generating response: {str(e)}"
''')

def main():
    """Apply all fixes to unified_bridge.py and patch QuantumVision.

    Returns:
        bool: True when all patches were applied, False when the bridge
        file is missing (backup step fails).
    """
    # backup_original_file() already checks that the bridge file exists and
    # prints the same error message, so a separate check here is redundant.
    if not backup_original_file():
        return False

    # Read the file content.  UTF-8 explicitly: Python source is UTF-8 by
    # default, and this keeps the patch independent of the platform locale.
    with open(UNIFIED_BRIDGE_PATH, 'r', encoding='utf-8') as file:
        content = file.read()

    # Apply fixes
    content = fix_openmanus_integration(content)
    content = fix_quantum_vision_integration(content)
    content = fix_integration_helper_functions(content)

    # Write the updated content back
    with open(UNIFIED_BRIDGE_PATH, 'w', encoding='utf-8') as file:
        file.write(content)

    # Create OpenAI helper function
    create_openai_helper_function()

    print("Fixes applied successfully!")
    print("Restart the Atlas Unified service to apply the changes.")
    return True

if __name__ == "__main__":
    # main() returns False on failure; propagate that to the shell via a
    # non-zero exit code instead of always exiting 0.
    sys.exit(0 if main() else 1)