| 1 | + | # This code has been developed by Fundació Privada Internet i Innovació Digital a Catalunya (i2CAT) |
| 2 | + | import io |
| 3 | + | import AI_Engine.core.data_handler as data |
| 4 | + | import AI_Engine.core.utils as utils |
| 5 | + | import AI_Engine.core.model as model |
| 6 | + | import AI_Engine.core.mapping.mapping_techniques_data_sources as mapping |
| 7 | + | import pandas as pd |
| 8 | + | import spacy as sp |
| 9 | + | |
| 10 | + | import logging |
| 11 | + | import os |
| 12 | + | import json |
| 13 | + | import sys |
| 14 | + | from logging.handlers import RotatingFileHandler |
| 15 | + | |
| 16 | + | |
| 17 | + | from fastapi.logger import logger |
| 18 | + | from typing import List |
| 19 | + | from fastapi import FastAPI, Request |
| 20 | + | from spacy import displacy |
| 21 | + | from tqdm import tqdm |
| 22 | + | |
| 23 | + | |
| 24 | + | from fastapi.responses import StreamingResponse |
| 25 | + | |
| 26 | + | from fastapi.middleware.cors import CORSMiddleware |
| 27 | + | |
# --- FastAPI application and CORS setup -----------------------------------
app = FastAPI()

# NOTE(review): allowed origins are hard-coded; original Catalan TODO asked
# whether they should come from an input / configuration file — confirm.
origins = [
    "http://localhost",
    "http://localhost:8081",
    "http://localhost:4200",
    "http://localhost:4201",
    "http://localhost:9000",
    "http://localhost:9001",
    "http://172.26.212.33",
    "http://172.26.212.33:8081",
    "http://172.26.212.33:4200",
    "http://172.26.212.33:4201",
    "http://172.26.212.33:9000",
    "http://172.26.212.33:9001",

]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,      # only the hosts listed above may call the API
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


# Project configuration (paths and ElasticSearch host) loaded once at import.
config = utils.load_config("AI_Engine/config_files/")

dataset_path = config.get("PATH", "DATASET_PATH")          # processed datasets
model_path = config.get("PATH", "MODEL_PATH")              # trained model path
rawData_path = config.get("PATH", "RAW_PATH")              # raw data pulled from Elastic
predictions_path = config.get("PATH", "PREDICTIONS_PATH")  # prediction output

elastic_host = config.get("ELASTIC", "CLIENT_HOST")
| 63 | + | |
@app.get("/train")
async def train_model(
    train_pairs: str =
"""
{"webserver" : ["webproxy-squid", "webserver-generic", "webserver-nginx"],
"dns" : ["dns-infoblox-nios", "dns-generic"],
"identity" : ["identity-service-cisco"],
"evtx" : ["microsoft-windows-evtx"],
"firewall" : ["firewall-fortigate","firewall-paloalto"]
}
"""
):
    """
    Endpoint to re-train the model selected through the configuration file.

    :param train_pairs: JSON string mapping each category to the list of
        index names whose data is used for training.
    :return: dict with the path the trained model was saved to.
    :raises UnboundLocalError: when no training dataset was produced (i.e.
        PREPARE_DATA / FASTTEXT options are disabled so there is nothing to
        train on).
    """
    logging.basicConfig(level=logging.DEBUG, filename='logs/train', filemode='w')
    # NOTE(review): this attaches a fresh handler on every call, so repeated
    # invocations duplicate log lines — consider configuring logging once.
    logger.addHandler(RotatingFileHandler("logs/train", maxBytes=1000, backupCount=0))

    retrieve = config.getboolean("OPTIONS", "RETRIEVE")
    prepare_data = config.getboolean("OPTIONS", "PREPARE_DATA")
    fasttext_flag = config.getboolean("OPTIONS", "FASTTEXT")

    global dataset_path, model_path, rawData_path, elastic_host

    logger.debug(f'[PATH] Dataset Path:{dataset_path}')
    logger.debug(f'[PATH] Model Path:{model_path}')
    logger.debug(f'[PATH] RawData Path:{rawData_path}')
    logger.debug(f'[OPTIONS] FASTTEXT?:{fasttext_flag}')
    logger.debug(f'[HOST] ElasticSearch database host: {elastic_host}')
    logger.debug(f'[Model Type] Model used:{"fasttext multinomial logistic regression" if fasttext_flag else "XGBoost"}')

    # The API receives category -> [indexes]; the data layer expects the
    # inverse mapping (index -> category).
    train_pairs = json.loads(train_pairs)
    train_pairs = utils.exchange_key_value(train_pairs)

    train_path = None  # set only when a fasttext training dataset is produced

    if prepare_data:
        if retrieve:
            utils.get_datasets_elastic(client_host=elastic_host, datasets_path=rawData_path,
                                       search_object={"query": {"match_all": {}}}, all_idx=False,
                                       idx_names=list(train_pairs.keys()))

        if fasttext_flag:
            train_path, _ = data.create_datasets(path_to_rawData=rawData_path,
                                                 path_to_datasets=dataset_path + 'fasttext_',
                                                 training_data=False, idx_pairs=train_pairs,
                                                 predict=False)

    # The original wrapped the training call in a bare `except:` that masked
    # *every* failure as UnboundLocalError. Fail explicitly only when no
    # dataset was produced, and let genuine training errors propagate.
    if train_path is None:
        raise UnboundLocalError('Data is not saved, modify the config.ini: [PREPRARE_DATA]=True & [RETRIEVE]=True to retrieve the data')

    model_name = 'model.bin'
    model.train_model(train_path, path_save_model=model_path + model_name, fasttext_flag=fasttext_flag)

    config.set("PATH", "MODEL_PATH", model_path + model_name)
    return {"model path": model_path + model_name}
| 121 | + | |
| 122 | + | |
| 123 | + | |
| 124 | + | |
| 125 | + | |
@app.get("/predict")
async def predict(predict_idxs: str = """["classification_validate_dataset","index1", "index2", "index3"]""", re_execution: str = "False"):
    """
    Endpoint that loads a pretrained model to make predictions over data.

    :param predict_idxs: JSON string with the list of index names whose
        datasets the predictions are performed on.
    :param re_execution: "True" forces a new prediction run; otherwise the
        most recent file in ``results/`` is reused when one exists.
    :return: dict with per-index prediction summaries, the MITRE technique
        overlap and the extracted log entities.
    """
    global dataset_path, rawData_path, elastic_host

    logging.basicConfig(level=logging.DEBUG, filename='logs/predict', filemode='w')

    # Query parameters arrive as strings; only the literal "True" re-executes.
    re_execution = re_execution == "True"
    model_path = config.get("PATH", "MODEL_PATH")

    if re_execution or len(os.listdir('results')) == 0:
        fasttext_flag = config.getboolean("OPTIONS", "FASTTEXT")
        prepare_data = config.getboolean("OPTIONS", "PREPARE_DATA")
        retrieve = config.getboolean("OPTIONS", "RETRIEVE")

        predict_idxs = json.loads(predict_idxs)

        # MODEL_PATH may point at a directory; default to model.bin inside it.
        if not os.path.isfile(model_path):
            model_path = model_path + 'model.bin'

        if prepare_data:
            if retrieve:
                utils.get_datasets_elastic(elastic_host, datasets_path=rawData_path,
                                           search_object={"query": {"match_all": {}}},
                                           idx_names=predict_idxs, all_idx=False)

            if fasttext_flag:
                _, test_path = data.create_datasets(path_to_rawData=rawData_path,
                                                    path_to_datasets=dataset_path,
                                                    training_data=False, predict=True,
                                                    predict_idx=predict_idxs, idx_pairs={})

        logger.debug(f'[PATH] Dataset Path:{dataset_path}')
        logger.debug(f'[PATH] Model Path:{model_path}')
        logger.debug(f'[PATH] Predictions Path:{predictions_path}')
        logger.debug(f'[OPTIONS] FASTTEXT?:{fasttext_flag}')

        output = {}

        for predict_index in predict_idxs:
            file_name = f'fasttext_test-{predict_index}.txt'

            results = model.predict(dataset_path + file_name, model_path, predictions_path,
                                    fasttext_flag=fasttext_flag)
            data_sources = utils.category_percent(results['Path to predictions'], data_sources=True).keys()

            output[predict_index] = {
                "Category Split: Data types": utils.category_percent(results['Path to predictions'], data_sources=False),
                "Techniques": mapping.dataSources2techniques(data_sources),
                "Predictions Results / Path to predictions": results
            }

        output['MITRE'] = utils.technique_overlapping(output)
        # was json.load(open(...)): the handle was never closed
        with open("entities.json") as entities_file:
            output['log-entities'] = json.load(entities_file)

        # Per-index technique lists are aggregated into output['MITRE'];
        # drop the redundant per-index copies.
        for key in predict_idxs:
            if key in output:
                del output[key]["Techniques"]

        from datetime import datetime

        # NOTE(review): datetime.now() yields colons in the file name, which
        # is not portable to Windows file systems — confirm deployment target.
        with open(f'results/{datetime.now()}.json', 'w') as results_file:
            json.dump(output, results_file, indent=4)

    else:
        # Reuse the newest stored run (names are timestamps, so the
        # lexicographic max is the latest one).
        file = r'results/' + str(max(os.listdir('results')))
        with open(file) as results_file:
            output = json.load(results_file)

    # The original try/except around os.path.isfile was dead code: isfile()
    # returns a bool and never raises FileNotFoundError.
    return output
| 202 | + | |
| 203 | + | |
@app.get("/get_results")
async def get_results():
    """
    Return the most recent prediction results stored in ``results/``.

    The newest file is the lexicographic max of the directory listing (file
    names are ``datetime.now()`` timestamps, so this picks the latest run).

    :return: the parsed results dict, or the string "File not available"
        when no readable results file exists.
    """
    try:
        # os.listdir raises OSError when results/ is missing; max() raises
        # ValueError when it is empty.
        latest = r'results/' + str(max(os.listdir('results')))
        # was open() without close: the handle leaked
        with open(latest) as results_file:
            return json.load(results_file)
    except (OSError, ValueError, json.JSONDecodeError):
        # was a bare except: — narrowed to the failures we expect here
        return "File not available"
| 211 | + | |
| 212 | + | |
@app.get("/assert_completeness")
async def predict_completeness(predict_idxs: str = """["webproxy-squid","webserver-nginx","webserver-generic"]"""):
    """
    Endpoint to assert the completeness over a set of given indexes using the
    configured spaCy NER model.

    :param predict_idxs: JSON string with the list of index names whose raw
        datasets are analysed.
    :return: dict with the distinct entity labels found and, per analysed
        line, the extracted entities as (text, start_char, end_char, label).
    """
    model_path = config.get("PATH", "MODEL_PATH_COMPLETENESS")
    rawData_path = config.get("PATH", "RAW_PATH")
    retrieve = config.getboolean("OPTIONS", "RETRIEVE_COMPLETENESS")

    logging.basicConfig(level=logging.DEBUG, filename='logs/predict', filemode='w')
    logger.debug(f'[PATH] Model Path:{model_path}')
    logger.debug(f'[PATH] RawData Path:{rawData_path}')
    logger.debug(f'[OPTIONS] Data retrieved? :{retrieve}')

    predict_idxs = json.loads(predict_idxs)

    if retrieve:
        utils.get_datasets_elastic(elastic_host, datasets_path=rawData_path,
                                   search_object={"query": {"match_all": {}}},
                                   idx_names=predict_idxs, all_idx=False)

    nlp_ner = sp.load(model_path)

    output_dict = {}
    named_entities = []

    for idx in predict_idxs:
        print('Extracting from ', idx)
        # was manual open()/close(): a context manager releases the handle
        # even when reading fails. Only the first 100 lines are analysed.
        with open(f"{rawData_path}/{idx}.txt") as test_data:
            test_data_lines = test_data.read().splitlines()[0:100]

        for line in tqdm(test_data_lines):
            doc = nlp_ner(line)
            output_dict[line] = []
            for ent in doc.ents:
                output_dict[line].append((ent.text, ent.start_char, ent.end_char, ent.label_))
                if ent.label_ not in named_entities:
                    named_entities.append(ent.label_)

    return {"named_entities": named_entities,
            "entities_by_point": output_dict
            }
| 257 | + | |
| 258 | + | |
| 259 | + | |
@app.get("/dummy")
async def dummy_endpoint():
    """
    Return a hard-coded example payload (MITRE techniques, per-index log
    sources and log types) for front-end development and testing.

    NOTE(review): the payload is returned as a json.dumps string, which
    FastAPI serializes a second time (double-encoded body). Kept as-is in
    case existing clients depend on it — confirm before changing.
    """
    # Dummy percentage of each MITRE data source detected, per index.
    idxs_sources = [
        {
            "index_1": {
                "Network Traffic Content (webserver)": "64.8824296805",
                "Domain Name: Active DNS": "86.136300963",
                "Network Traffic Content": "19.0061410207",
                "Filebeat values are waiting...": "12.7020491282",
                "Network Traffic Content (DHCP)": "2.2477302618",
                "User Account: User Account Authentication": "0.8545988715",
                "User Account: User Account Creation": "0.8545988715",
                "User Account: User Account Deletion": "0.8545988715",
                "User Account: User Account Modification": "1.709197743",
                "User Account: User Account Metadata": "0.8545988715",
                "Process: OS API Execution": "0.3070510373",
                "Process: Process Access": "0.3070510373",
                "Process: Process Creation": "0.3070510373",
                "Process: Process Metadata": "0.3070510373",
                "Process: Process Modification": "0.3070510373",
                "Process: Process Termination": "0.3070510373"
            }
        },
        {
            "index_2": {
                "Network Traffic Content (webserver)": "34.8824296805",
                "Domain Name: Active DNS": "86.136300963",
                "Network Traffic Content": "49.0061410207",
                "Filebeat values are waiting...": "8.7020491282",
                "Network Traffic Content (DHCP)": "2.2477302618",
                "User Account: User Account Authentication": "4.8545988715",
                "User Account: User Account Creation": "4.8545988715",
                "User Account: User Account Deletion": "4.8545988715",
                "User Account: User Account Modification": "1.709197743",
                "User Account: User Account Metadata": "0.8545988715",
                "Process: OS API Execution": "0.3070510373",
                "Process: Process Access": "0.3070510373",
                "Process: Process Creation": "0.3070510373",
                "Process: Process Metadata": "0.3070510373",
                "Process: Process Modification": "0.3070510373",
                "Process: Process Termination": "0.3070510373"
            }
        },
        {
            "index_3": {
                "Network Traffic Content (webserver)": "64.8824296805",
                "Domain Name: Active DNS": "46.136300963",
                "Network Traffic Content": "19.0061410207",
                "Filebeat values are waiting...": "12.7020491282",
                "Network Traffic Content (DHCP)": "2.2477302618",
                "User Account: User Account Authentication": "0.8545988715",
                "User Account: User Account Creation": "0.8545988715",
                "User Account: User Account Deletion": "0.8545988715",
                "User Account: User Account Modification": "1.709197743",
                "User Account: User Account Metadata": "0.8545988715",
                "Process: OS API Execution": "20.3070510373",
                "Process: Process Access": "20.3070510373",
                "Process: Process Creation": "20.3070510373",
                "Process: Process Metadata": "20.3070510373",
                "Process: Process Modification": "20.3070510373",
                "Process: Process Termination": "20.3070510373"
            }
        }
    ]

    # Dummy percentage of each log type, per index.
    idxs_types = [
        {
            "index_1": {
                "filebeat": "76.7458580307",
                "evtx": "12.5517246889",
                "dns": "9.4877857839",
                "webserver": "1.2146314965"
            }
        },
        {
            "index_2": {
                "filebeat": "66.7458580307",
                "evtx": "17.5517246889",
                "dns": "14.4877857839",
                "webserver": "1.2146314965"
            }
        },
        {
            "index_3": {
                "filebeat": "56.7458580307",
                "evtx": "22.5517246889",
                "dns": "19.4877857839",
                "webserver": "1.2146314965"
            }
        }
    ]

    # was json.load(open(...)): the file handle was never closed
    with open('AI_Engine/production_src/dummy_techniques.json') as mitre_file:
        mitre = json.load(mitre_file)

    dummy_dict = {
        "MITRE": mitre,
        "log-sources": idxs_sources,
        "log-types": idxs_types
    }

    return(json.dumps(dummy_dict, indent=4))
| 365 | + | |
| 366 | + | |
@app.get("/dummy__")
async def dummy_endpoint__():
    """
    Extended dummy endpoint: returns the example payload of /dummy plus
    per-index extracted log entities, and persists four variants of the
    output (full, complete, partial, missing techniques) under
    ``data/api_out/``.

    :return: dict with MITRE techniques, log sources, log types and entities.
    """
    # Dummy percentage of each MITRE data source detected, per index.
    idxs_sources = [
        {
            "index_1": {
                "Network Traffic Content (webserver)": "64.8824296805",
                "Domain Name: Active DNS": "86.136300963",
                "Network Traffic Content": "19.0061410207",
                "Filebeat values are waiting...": "12.7020491282",
                "Network Traffic Content (DHCP)": "2.2477302618",
                "User Account: User Account Authentication": "0.8545988715",
                "User Account: User Account Creation": "0.8545988715",
                "User Account: User Account Deletion": "0.8545988715",
                "User Account: User Account Modification": "1.709197743",
                "User Account: User Account Metadata": "0.8545988715",
                "Process: OS API Execution": "0.3070510373",
                "Process: Process Access": "0.3070510373",
                "Process: Process Creation": "0.3070510373",
                "Process: Process Metadata": "0.3070510373",
                "Process: Process Modification": "0.3070510373",
                "Process: Process Termination": "0.3070510373"
            }
        },
        {
            "index_2": {
                "Network Traffic Content (webserver)": "34.8824296805",
                "Domain Name: Active DNS": "86.136300963",
                "Network Traffic Content": "49.0061410207",
                "Filebeat values are waiting...": "8.7020491282",
                "Network Traffic Content (DHCP)": "2.2477302618",
                "User Account: User Account Authentication": "4.8545988715",
                "User Account: User Account Creation": "4.8545988715",
                "User Account: User Account Deletion": "4.8545988715",
                "User Account: User Account Modification": "1.709197743",
                "User Account: User Account Metadata": "0.8545988715",
                "Process: OS API Execution": "0.3070510373",
                "Process: Process Access": "0.3070510373",
                "Process: Process Creation": "0.3070510373",
                "Process: Process Metadata": "0.3070510373",
                "Process: Process Modification": "0.3070510373",
                "Process: Process Termination": "0.3070510373"
            }
        },
        {
            "index_3": {
                "Network Traffic Content (webserver)": "64.8824296805",
                "Domain Name: Active DNS": "46.136300963",
                "Network Traffic Content": "19.0061410207",
                "Filebeat values are waiting...": "12.7020491282",
                "Network Traffic Content (DHCP)": "2.2477302618",
                "User Account: User Account Authentication": "0.8545988715",
                "User Account: User Account Creation": "0.8545988715",
                "User Account: User Account Deletion": "0.8545988715",
                "User Account: User Account Modification": "1.709197743",
                "User Account: User Account Metadata": "0.8545988715",
                "Process: OS API Execution": "20.3070510373",
                "Process: Process Access": "20.3070510373",
                "Process: Process Creation": "20.3070510373",
                "Process: Process Metadata": "20.3070510373",
                "Process: Process Modification": "20.3070510373",
                "Process: Process Termination": "20.3070510373"
            }
        }
    ]

    # Dummy percentage of each log type, per index.
    idxs_types = [
        {
            "index_1": {
                "filebeat": "76.7458580307",
                "evtx": "12.5517246889",
                "dns": "9.4877857839",
                "webserver": "1.2146314965"
            }
        },
        {
            "index_2": {
                "filebeat": "66.7458580307",
                "evtx": "17.5517246889",
                "dns": "14.4877857839",
                "webserver": "1.2146314965"
            }
        },
        {
            "index_3": {
                "filebeat": "56.7458580307",
                "evtx": "22.5517246889",
                "dns": "19.4877857839",
                "webserver": "1.2146314965"
            }
        }
    ]

    # Dummy named entities extracted from each log type, per index.
    entities = [
        {
            "index_1": {
                "firewall": ["FILENAME", "PACKETS-SENT", "URL", "SORUCE", "APPLICATION", "PACKETS-RECEIVED", "BYTES-SENT", "BYTES-RECEIVED"],
                "dns": ["IP", "PROTOCOL", "CODE", "BYTES-SENT", "URL", "USER_AGENT"],
                "webserver": ["IP", "URL", "HTTP-CODE", "HTTP-METHOD"],
                "evtx": [],
                "filebeat": []
            }
        },
        {
            "index_2": {
                "firewall": ["FILENAME", "URL", "SORUCE", "APPLICATION", "BYTES-SENT", "BYTES-RECEIVED"],
                "dns": ["IP", "PROTOCOL", "CODE", "URL"],
                "webserver": ["IP", "URL", "HTTP-CODE", "HTTP-METHOD"],
                "evtx": [],
                "filebeat": []
            }
        },
        {
            "index_3": {
                "firewall": ["FILENAME", "PACKETS-SENT", "URL", "SORUCE", "APPLICATION", "PACKETS-RECEIVED"],
                "dns": ["IP", "PROTOCOL", "CODE", "BYTES-SENT", "URL"],
                "webserver": ["IP", "URL", "HTTP-METHOD"],
                "evtx": [],
                "filebeat": []
            }
        }
    ]

    # was json.load(open(...)): the file handle was never closed
    with open('AI_Engine/production_src/dummy_techniques.json') as mitre_file:
        mitre = json.load(mitre_file)

    dummy_out_full = {
        "MITRE": mitre,
        "log-sources": idxs_sources,
        "log-types": idxs_types,
        "log-entities": entities
    }

    # Split the techniques by their coverage color (green/yellow/red).
    dummy_out_complete = [{'techniqueID': technique, 'color': color} for technique, color in mitre.items() if color == "#8cdd69"]
    dummy_out_partial = [{'techniqueID': technique, 'color': color} for technique, color in mitre.items() if color == "#ffd966"]
    dummy_out_missing = [{'techniqueID': technique, 'color': color} for technique, color in mitre.items() if color == "#ed4f4f"]

    saving_path = "data/api_out/"
    # robustness: the original failed with FileNotFoundError when the output
    # directory did not exist yet
    os.makedirs(saving_path, exist_ok=True)
    files_paths = list(map(lambda file: saving_path + file, ['dummy_out_full.txt', 'dummy_out_complete.txt', 'dummy_out_partial.txt', 'dummy_out_missing.txt']))

    outputs = [dummy_out_full, dummy_out_complete, dummy_out_partial, dummy_out_missing]

    for path, output in zip(files_paths, outputs):
        with open(path, 'w') as f:
            json.dump(output, f, ensure_ascii=False, indent=4)

    return dummy_out_full
| 519 | + | |
| 520 | + | |
| 521 | + | |
@app.get("/loadConfig")
async def loadConfig():
    """Serve the stored MITRE matrix configuration as a JSON byte stream."""
    config_path = "data/api_out/MatrixConfigurationFinal.json"
    with open(config_path, "rb") as config_file:
        payload = config_file.read()
    return StreamingResponse(io.BytesIO(payload), media_type="application/json; charset=utf8")
| 527 | + | |
@app.post("/uploadConfig")
async def uploadConfig(data: Request):
    """
    Persist a MITRE matrix configuration posted as the JSON request body.

    :param data: incoming request whose body must be valid JSON.
    :return: "OK" on success, otherwise the I/O error message.
    """
    conf = await data.json()
    try:
        with open('data/api_out/MatrixConfigurationFinal.json', 'w', encoding='utf-8') as outfile:
            json.dump(conf, outfile, ensure_ascii=True, indent=4)
        return "OK"
    except IOError as e:
        # was `return e`: an exception instance is not JSON-serializable, so
        # the response itself would fail; return its message instead.
        return str(e)