File size: 7,927 Bytes
cc47095
 
 
 
 
 
 
 
 
 
 
 
4131afb
eea3a7e
8189811
 
089cff4
4aa9206
12ea30f
114528e
61661f8
8189811
8ee02dc
4131afb
e6605fa
ea263f7
 
418e0c1
cc47095
 
 
 
 
 
418e0c1
cc47095
 
 
3e16173
8189811
cc47095
114528e
d8aacae
 
 
4131afb
d8aacae
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
089cff4
114528e
089cff4
 
1af5dbd
4131afb
cc47095
4131afb
ea263f7
cc47095
ea263f7
 
114528e
 
 
 
 
 
 
 
 
 
 
ea263f7
114528e
 
 
 
 
 
 
 
 
cc47095
 
114528e
1a83889
 
114528e
 
cc47095
 
 
 
 
 
114528e
4131afb
114528e
4131afb
114528e
cc47095
d3ba393
1a83889
 
 
 
 
 
d3ba393
8189811
1a83889
 
114528e
 
 
 
 
cc47095
1a83889
cc47095
 
 
 
 
 
 
 
 
 
 
 
1a83889
114528e
1a83889
 
 
 
4131afb
114528e
4131afb
 
 
 
 
 
1a83889
4131afb
1a83889
114528e
1a83889
 
 
 
 
 
 
 
cc47095
 
114528e
1a83889
 
 
 
 
 
 
114528e
1a83889
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
import asyncio
import json
import logging
import os
from datetime import datetime, timedelta
from typing import List, Dict, Any

import aiohttp
import faiss
import numpy as np
import pyttsx3
import torch
from cryptography.fernet import Fernet
from jwt import encode, decode, ExpiredSignatureError
from transformers import AutoModelForCausalLM, AutoTokenizer

from components.multi_model_analyzer import MultiAgentSystem
from components.neuro_symbolic_engine import NeuroSymbolicEngine
from components.self_improving_ai import SelfImprovingAI
from modules.secure_memory_loader import load_secure_memory_module
from ethical_filter import EthicalFilter
from codette_openai_fallback import query_codette_with_fallback
from CodriaoCore.federated_learning import FederatedAI
from utils.database import Database
from utils.logger import logger
from codriao_tb_module import CodriaoHealthModule
from fail_safe import AIFailsafeSystem
from quarantine_engine import QuarantineEngine
from anomaly_score import AnomalyScorer  # fix: was indented at module level (SyntaxError)

class AICoreAGIX:
    """Central orchestrator wiring together ethics filtering, local LLM
    inference, multi-agent delegation, secure vector memory, federated
    learning, TB health diagnostics, and failsafe/lockdown controls.

    Structural fixes applied in this revision:
    - ``engage_lockdown_mode`` was accidentally defined at module level
      (with a stray ``self`` parameter), splitting the class body in two;
      it is now a proper method.
    - The secure-memory / speech / quarantine initialization lines were
      orphaned after that stray function (a SyntaxError); they are merged
      back into ``__init__``.
    """

    def __init__(self, config_path: str = "config.json"):
        """Build every subsystem from the JSON config at *config_path*.

        Raises:
            FileNotFoundError: if the config file is missing.
            json.JSONDecodeError: if the config file is malformed.
        """
        self.ethical_filter = EthicalFilter()
        self.config = self._load_config(config_path)
        self.tokenizer = AutoTokenizer.from_pretrained(self.config["model_name"])
        self.model = AutoModelForCausalLM.from_pretrained(self.config["model_name"])
        self.context_memory = self._initialize_vector_memory()
        # NOTE(review): aiohttp deprecates creating a ClientSession outside a
        # running event loop — confirm callers construct this class from async
        # context.
        self.http_session = aiohttp.ClientSession()
        self.database = Database()
        self.multi_agent_system = MultiAgentSystem()
        self.self_improving_ai = SelfImprovingAI()
        self.neural_symbolic_engine = NeuroSymbolicEngine()
        self.federated_ai = FederatedAI()
        self.failsafe_system = AIFailsafeSystem()
        # Fix: engage_lockdown_mode() sets this flag, but it was never
        # initialized, so reading it before any lockdown would raise.
        self.lockdown_engaged = False

        # Secure memory setup: a fresh Fernet key per process encrypts
        # per-user query vectors held by SecureMemorySession.
        self._encryption_key = Fernet.generate_key()
        secure_memory_module = load_secure_memory_module()
        SecureMemorySession = secure_memory_module.SecureMemorySession
        self.secure_memory_loader = SecureMemorySession(self._encryption_key)

        self.speech_engine = pyttsx3.init()
        self.health_module = CodriaoHealthModule(ai_core=self)
        self.quarantine_engine = QuarantineEngine()
        self.anomaly_scorer = AnomalyScorer()

    def engage_lockdown_mode(self, reason: str = "Unspecified anomaly") -> Dict[str, Any]:
        """Disable external-facing subsystems, log the event, and trip the
        failsafe. Returns a confirmation dict for the caller.

        Synchronous by design so it can run from any context in an emergency.
        """
        timestamp = datetime.utcnow().isoformat()
        self.lockdown_engaged = True

        # Disable external systems. NOTE(review): the HTTP session is dropped
        # without awaiting close() — tolerable in a sync emergency path, but it
        # leaks the connector; shutdown() guards against the resulting None.
        try:
            self.http_session = None
            if hasattr(self.federated_ai, "network_enabled"):
                self.federated_ai.network_enabled = False
            if hasattr(self.self_improving_ai, "enable_learning"):
                self.self_improving_ai.enable_learning = False
        except Exception as e:
            logger.error(f"Lockdown component shutdown failed: {e}")

        # Log the event
        lockdown_event = {
            "event": "Lockdown Mode Activated",
            "reason": reason,
            "timestamp": timestamp
        }
        logger.warning(f"[LOCKDOWN MODE] - Reason: {reason} | Time: {timestamp}")
        self.failsafe_system.trigger_failsafe("Lockdown initiated", str(lockdown_event))

        # Return confirmation
        return {
            "status": "Lockdown Engaged",
            "reason": reason,
            "timestamp": timestamp
        }

    def _load_config(self, config_path: str) -> dict:
        """Load and return the JSON configuration file; log and re-raise on failure."""
        try:
            with open(config_path, 'r') as file:
                return json.load(file)
        except FileNotFoundError:
            logger.error(f"Configuration file not found: {config_path}")
            raise
        except json.JSONDecodeError as e:
            logger.error(f"Error decoding JSON in config file: {config_path}, Error: {e}")
            raise

    def _initialize_vector_memory(self):
        """Initialize FAISS vector memory (flat L2 index over 768-dim vectors)."""
        return faiss.IndexFlatL2(768)

    def _vectorize_query(self, query: str):
        """Vectorize a user query: returns the tokenizer's input-id array as numpy."""
        tokenized = self.tokenizer(query, return_tensors="pt")
        return tokenized["input_ids"].detach().numpy()

    async def generate_response(self, query: str, user_id: int) -> Dict[str, Any]:
        """Full response pipeline: validate -> ethics screen -> (optional TB
        diagnostics) -> encrypt query vector -> fan out to all reasoning
        subsystems concurrently -> failsafe check -> log/speak -> respond.

        Returns either ``{"response": ..., ...}`` or ``{"error": ...}``;
        never raises (all failures are caught and reported as an error dict).
        """
        try:
            # Validate query input
            if not isinstance(query, str) or not query.strip():
                raise ValueError("Invalid query input.")

            # Ethical filter
            result = self.ethical_filter.analyze_query(query)
            if result["status"] == "blocked":
                return {"error": result["reason"]}
            if result["status"] == "flagged":
                logger.warning(result["warning"])

            # Special diagnostics trigger
            if any(phrase in query.lower() for phrase in ["tb check", "analyze my tb", "run tb diagnostics", "tb test"]):
                return await self.run_tb_diagnostics("tb_image.jpg", "tb_cough.wav", user_id)

            # Vector memory: encrypt and store the query embedding per user.
            vectorized_query = self._vectorize_query(query)
            self.secure_memory_loader.encrypt_vector(user_id, vectorized_query)

            # Fan out to every reasoning subsystem concurrently.
            responses = await asyncio.gather(
                self._generate_local_model_response(query),
                self.multi_agent_system.delegate_task(query),
                self.self_improving_ai.evaluate_response(query),
                self.neural_symbolic_engine.integrate_reasoning(query)
            )

            final_response = "\n\n".join(responses)

            # Verify response safety before anything user-visible happens.
            safe = self.failsafe_system.verify_response_safety(final_response)
            if not safe:
                return {"error": "Failsafe triggered due to unsafe response content."}

            self.database.log_interaction(user_id, query, final_response)
            self._log_to_blockchain(user_id, query, final_response)
            self._speak_response(final_response)

            return {
                "response": final_response,
                "real_time_data": self.federated_ai.get_latest_data(),
                "context_enhanced": True,
                "security_status": "Fully Secure"
            }
        except Exception as e:
            logger.error(f"Response generation failed: {e}")
            return {"error": "Processing failed - safety protocols engaged"}

    async def _generate_local_model_response(self, query: str) -> str:
        """Generate a response using the local causal-LM."""
        inputs = self.tokenizer(query, return_tensors="pt")
        outputs = self.model.generate(**inputs)
        return self.tokenizer.decode(outputs[0], skip_special_tokens=True)

    async def run_tb_diagnostics(self, image_path: str, audio_path: str, user_id: int) -> Dict[str, Any]:
        """Run TB diagnostics via the health module; returns its result dict,
        or ``{"tb_risk": "ERROR", ...}`` on failure (never raises)."""
        try:
            result = await self.health_module.evaluate_tb_risk(image_path, audio_path, user_id)
            logger.info(f"TB Diagnostic Result: {result}")
            return result
        except Exception as e:
            logger.error(f"TB diagnostics failed: {e}")
            return {"tb_risk": "ERROR", "error": str(e)}

    def _log_to_blockchain(self, user_id: int, query: str, final_response: str):
        """Log the interaction to the blockchain with retries.

        NOTE(review): the loop body is a placeholder — it only logs and breaks
        on the first attempt; the actual blockchain write is not implemented.
        """
        retries = 3
        for attempt in range(retries):
            try:
                logger.info(f"Logging interaction to blockchain: Attempt {attempt + 1}")
                break
            except Exception as e:
                logger.warning(f"Blockchain logging failed: {e}")
                continue

    def _speak_response(self, response: str):
        """Speak the generated response aloud; failures are logged, not raised."""
        try:
            self.speech_engine.say(response)
            self.speech_engine.runAndWait()
        except Exception as e:
            logger.error(f"Speech synthesis failed: {e}")

    async def shutdown(self):
        """Close asynchronous resources.

        Fix: guard against ``http_session`` being None — lockdown mode nulls
        it out, and the original would raise AttributeError here.
        """
        if self.http_session is not None:
            await self.http_session.close()