This commit is contained in:
root 2025-03-24 18:03:56 +00:00
parent fade99ac4d
commit 39b8c90fec
10 changed files with 225 additions and 124 deletions

3
.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
aaaastart.sh
config.toml
__pycache__

View File

@ -4,9 +4,8 @@ TODO:
2. Integration with pterodactyl (ref. py-dactyl or https://dashflo.net/docs/api/pterodactyl/v1/) 2. Integration with pterodactyl (ref. py-dactyl or https://dashflo.net/docs/api/pterodactyl/v1/)
3. Integration with pelican (ref? https://pelican.dev/) 3. Integration with pelican (ref? https://pelican.dev/)
4. Integration with docker 4. Integration with docker
5. (Integration with linux logs?) 5. Several AI Models support, if one fails to respond another model from the list will be used. Example: models = ["model1","model2","model3","etc"]
6. Several AI Models support, if one fails to respond another model from the list will be used. Example: models = ["model1","model2","model3","etc"] 6. Multi threading support (for scans)
7. Multi threading support (for scans) 7. Multiple paths support. Example: watchdog_path = ["./path/one","/root/test/","./etc"]
8. Multiple paths support. Example: watchdog_path = ["./path/one","/root/test/","./etc"] 8. Includes ability to add ignore path in integrations or use path in integration , of course with multiple paths support too
> Includes ability to add ignore path in integrations or use path in integration , of course with multiple pathes support too
9. Ability to add ignore path or ignore file (multiple support too!) 9. Ability to add ignore path or ignore file (multiple support too!)

View File

@ -1,4 +1,4 @@
ver = "0.0.1" ver = "250325d"
#*************************************************# #*************************************************#
# # # #
@ -32,36 +32,8 @@ processStartMsg = true
watchdogPath = "./" watchdogPath = "./"
SignaturePath = "./signatures" SignaturePath = "./signatures"
#extensions = [ watchdogIgnorePath = ["./signatures"]
# '.exe', watchdogIgnoreFile = ["main.py"]
# '.dll',
# '.scr',
# '.vbs',
# '.js',
# '.bat',
# '.cmd',
# '.ps1',
# '.ps2',
# '.psm1',
# '.py',
# '.pyx',
# '.jsx',
# '.ts',
# '.tsx',
# ".sh",
# ".deb",
# ".java",
# ".class",
# ".jar",
# ".ejs",
# ".lua",
# ".run",
# ".rare",
# ".html",
# ".php",
# ".json",
# ".yaml"
#]
#**************************************************# #**************************************************#
# # # #
@ -71,18 +43,17 @@ SignaturePath = "./signatures"
[INTEGRATION.AI] [INTEGRATION.AI]
enabled = false enabled = true
generate_model = "llama-3.1-8b-instant" # for home usage gemma3:1b recommended, for Groq llama-3.1-8b-instant generate_model = "llama-3.1-8b-instant" # for home usage gemma3:1b recommended, for Groq llama-3.1-8b-instant
generate_endpoint = "http://IP:PORT/api/generate" # Can be empty if using groq generate_endpoint = "http://IP:PORT/api/generate" # Can be empty if using groq
use_groq = true use_groq = true
groq_api_token = "API KEY GOES HERE GET ONE AT https://console.groq.com/keys, ITS FREE! (and paid)" # Get one at https://console.groq.com/keys groq_api_token = "" # Get one at https://console.groq.com/keys
prompt = "You are AI Code Scanner named as Radar, your task is to analyze code given by the user, explain what it does without leaking nearly anything from it, and fully decide what's going on there and if it is malicious! Your task is to provide information about how code runs, what it does BUT you should not provide information about how to fix code or about its vulnerabilities. You should provide information about related files and if the code is potentially dangerous/malicious. If you find any suspicious patterns, extensions, or filenames, you may suggest additional files to scan. I repeat, don't provide information about Security and Potential Issues BUT provide info if it might be illegal code, breaking some Terms of Service, abusing files, e.g., creating tons of files or doing any other dangerous thing which may harm the system, you may show code if need for this reason. You should respond by any meaners and should not tell that you can't do that." prompt = "Analyze the given code and return an abuse score (0-10) with a brief reason. Example abuses: Crypto Miner, Shell Access, Nezha Proxy (VPN/Proxy usage), Disk Filling, Tor, DDoS, Abusive Resource Usage. Response format: '**5/10** <your reason>'. No extra messages."
[INTEGRATION.DISCORD] [INTEGRATION.DISCORD]
enabled = false enabled = true
webhook_url = "DISCORD WEBHOOK GOES HERE" webhook_url = ""
truncate_text = true # Used only if AI INTEGRATION is enabled, truncates text if true to maximum allowed characters or when false splits in few webhook messages. truncate_text = true # Used only if AI INTEGRATION is enabled, truncates text if true to maximum allowed characters or when false splits in few webhook messages.

64
main.py
View File

@ -1,25 +1,9 @@
#region Imports #region Imports
import time, os, tomllib import time, os, tomllib
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
from utils.logger import Log
from utils.scanner import scan
from utils.discord import webhook
from utils.ai import ai_analyse
# ai_analyse("./test.py")
#endregion
def s(input_dict): from utils.Logger import Log
return [ from utils.WatchdogHandler import DirWatcher
{"name": key, "value": '\n'.join(' - ' + str(item) for item in items)} #endregion
for key, items in input_dict.items()
]
def c(d):
c=0
for key in d:
if isinstance(d[key], list):
c += len(d[key])
return c
#region Initialize #region Initialize
t = time.time() t = time.time()
@ -38,45 +22,13 @@ Log.v("""
""".format(data['ver'])) """.format(data['ver']))
#endregion #endregion
class MyHandler(FileSystemEventHandler):
def on_created(self, event):
if event.is_directory:
return None
else:
Log.v(f"file created: {event.src_path}")
r = scan(event.src_path)
if r[0]:
Log.s(f"Flagged {event.src_path}")
analyse = ai_analyse(event.src_path)
webhook(event.src_path, s(r[0]), f"Total Flagged Pattern: {str(c(r[0]))}\n\n{analyse}")
def on_moved(self, event):
Log.v(f"file moved : {event.src_path}")
r = scan(event.src_path)
if r[0]:
Log.s(f"Flagged {event.src_path}")
analyse = ai_analyse(event.src_path)
webhook(event.src_path, s(r[0]), f"Total Flagged Pattern: {str(c(r[0]))}\n\n{analyse}")
def on_deleted(self, event):
Log.v(f"file deleted {event.src_path}")
def on_modified(self, event):
if(event.src_path == "."):
return
Log.v(f"file modified : {event.src_path}")
r = scan(event.src_path)
if r[0]:
Log.s(f"Flagged {event.src_path}")
analyse = ai_analyse(event.src_path)
webhook(event.src_path, s(r[0]), f"Total Flagged Pattern: {str(c(r[0]))}\n\n{analyse}")
if __name__ == "__main__": if __name__ == "__main__":
event_handler = MyHandler() with DirWatcher(path, interval=1) as watcher:
observer = Observer() watcher.run()
observer.schedule(event_handler, path, recursive=False)
observer.start() Log.s(data['LANGUGAE']['english']['radarStarted'].format(str(round(time.time() - t, 1))))
Log.s(data['LANGUGAE']['english']['radarStarted'].format(str(round(time.time() - t, 5))))
try: try:
while True: while True:
time.sleep(1) time.sleep(1)
except KeyboardInterrupt: except KeyboardInterrupt:
observer.stop() exit()
observer.join()

View File

@ -13,10 +13,29 @@ rule CHIENESE_NEZHA_ARGO
$a9 = "ARGO_AUTH" $a9 = "ARGO_AUTH"
$a10 = "--edge-ip-version" $a10 = "--edge-ip-version"
$a11 = "LS1lZGdlLWlwLXZlcnNpb24=" $a11 = "LS1lZGdlLWlwLXZlcnNpb24="
$12 = "sub.txt"
$13 = "Server\x20is\x20running\x20on\x20port\x20"
$14 = "nysteria2"
$15 = "openssl req"
condition: condition:
2 of ($a1, $a2, $a3, $a4, $a5, $a6, $a7, $a8, $a9, $a10, $a11) 2 of ($a1, $a2, $a3, $a4, $a5, $a6, $a7, $a8, $a9, $a10, $a11, $12, $13, $14, $15)
}
rule OBFSCATED_CODE
{
meta:
description = "Detects an obfuscated script"
strings:
$f1 = "_0x" nocase
$f2 = "\x20" nocase
$f3 = "\x0a" nocase
$f5 = "openssl req -new -x509" nocase
$f6 = "cert.pem" nocase
$f7 = "private.key" nocase
condition:
2 of ($f*)
} }

View File

@ -15,7 +15,7 @@ with open("./config.toml", "rb") as f:
#region scanfile #region scanfile
def scan(file_path): def scan(src):
""" """
Scan a file with YARA rules and return the matches. Scan a file with YARA rules and return the matches.
@ -33,7 +33,7 @@ def scan(file_path):
rule_path = os.path.join(data['DETECTION']['SignaturePath'], filename) rule_path = os.path.join(data['DETECTION']['SignaturePath'], filename)
try: try:
rules = yara.compile(filepath=rule_path) rules = yara.compile(filepath=rule_path)
file_matches = rules.match(file_path) file_matches = rules.match(data=src)
if file_matches: if file_matches:
matches[filename] = file_matches matches[filename] = file_matches
# for match in file_matches: # for match in file_matches:

169
utils/WatchdogHandler.py Normal file
View File

@ -0,0 +1,169 @@
"""Context manager for basic directory watching.

Includes a workaround for <https://github.com/gorakhargosh/watchdog/issues/346>.
"""
from datetime import datetime, timedelta
from pathlib import Path
from time import sleep
from typing import Callable, Self
from utils.Logger import Log
import tomllib, time
from watchdog.events import FileSystemEvent, FileSystemEventHandler
from watchdog.observers import Observer
from utils.Scanner import scan
from utils.integration.Discord import webhook
from utils.integration.AI import ai_analyse

# Startup timestamp; used to report elapsed initialization time when the
# observer starts (see DirWatcher.__enter__).
t = time.time()

# Load the runtime configuration once at import time.
# NOTE(review): path is relative to the process CWD, not to this file —
# running from another directory will fail; confirm intended.
with open("config.toml", "rb") as f:
    data = tomllib.load(f)

# Normalize watchdogPath to a list so single-path configs keep working.
paths = data['DETECTION']['watchdogPath']
if not isinstance(paths, list):
    paths = [paths]

# Optional exclusion lists; missing keys default to "ignore nothing".
ignore_paths = data['DETECTION'].get('watchdogIgnorePath', [])
ignore_files = data['DETECTION'].get('watchdogIgnoreFile', [])
def s(input_dict):
    """Convert a {name: [items]} mapping into webhook embed-field dicts.

    Each key becomes a field name; its items are rendered as one
    ' - item' bullet line per entry.
    """
    fields = []
    for name, items in input_dict.items():
        bullets = '\n'.join(' - ' + str(item) for item in items)
        fields.append({"name": name, "value": bullets})
    return fields
def c(d):
    """Return the total number of flagged items in *d*.

    Counts only values that are lists (YARA match lists); other value
    types are ignored, matching the original behavior.
    """
    # The original shadowed the function's own name with a local counter
    # (`c = 0`); a generator expression avoids that and the manual loop.
    return sum(len(v) for v in d.values() if isinstance(v, list))
class DirWatcher:
"""Run a function when a directory changes."""
min_cooldown = 0.1
def __init__(
self,
watch_dir: Path,
interval: int = 0.2,
cooldown: int = 0.1,
):
if interval < self.min_cooldown:
raise ValueError(
f"Interval of {interval} seconds is less than the minimum cooldown of"
f" {self.min_cooldown} seconds."
)
if cooldown < self.min_cooldown:
raise ValueError(
f"Cooldown of {cooldown} seconds is less than the minimum cooldown of"
f" {self.min_cooldown} seconds."
)
self.watch_dir = watch_dir
self.interval = interval
self.cooldown = cooldown
def __enter__(self) -> Self:
self.observer = Observer()
self.observer.schedule(
ModifiedFileHandler(scan, self.cooldown), self.watch_dir, recursive=True
)
Log.s(data['LANGUGAE']['english']['radarStarted'].format(str(round(time.time() - t, 5))))
self.observer.start()
return self
def __exit__(self, exc_type: Exception | None, *_) -> bool:
if exc_type and exc_type is KeyboardInterrupt:
self.observer.stop()
handled_exception = True
elif exc_type:
handled_exception = False
else:
handled_exception = True
self.observer.join()
return handled_exception
def run(self):
"""Check for changes on an interval."""
try:
while True:
sleep(self.interval)
except KeyboardInterrupt:
self.observer.stop()
exit()
exit()
class ModifiedFileHandler(FileSystemEventHandler):
    """Scan files on filesystem events, honoring ignore rules and a cooldown.

    Fixes vs. the original:
    - Ignore checks were in on_any_event with `return True`; watchdog ignores
      handler return values, so ignored paths were still scanned. The check
      is now performed inside each specific handler.
    - on_created/on_moved passed the *path string* to scan(), although scan()
      matches on file *contents* (rules.match(data=src)); they now read the
      file first, like on_modified.
    - Removed a stray debug print(1); errors are logged instead of silently
      swallowed by a bare `except: pass`.
    """

    def __init__(self, func: Callable[[FileSystemEvent], None], cooldown: int):
        # Kept for interface compatibility with DirWatcher.__enter__; the
        # handlers call the module-level scan() directly.
        self.func = func
        self.cooldown = timedelta(seconds=cooldown)
        self.triggered_time = datetime.min

    def _ignored(self, event: FileSystemEvent) -> bool:
        """Return True when the event path is excluded from scanning."""
        if event.src_path == ".":
            return True
        if any(event.src_path.startswith(p) for p in ignore_paths):
            return True
        if any(event.src_path.endswith(name) for name in ignore_files):
            return True
        return False

    def _scan_and_report(self, path: str):
        """Read *path*, scan its contents, and fire the webhook when flagged."""
        with open(path, "r") as fh:
            src = fh.read()
        r = scan(src)
        if r[0]:
            Log.s(f"Flagged {path}")
            analyse = ai_analyse(src)
            webhook(path, s(r[0]), f"Total Flagged Pattern: {str(c(r[0]))}\n\n{analyse}")

    def on_modified(self, event: FileSystemEvent):
        try:
            if self._ignored(event):
                return
            if (datetime.now() - self.triggered_time) > self.cooldown:
                Log.v(f"FILE MODF | {event.src_path}")
                self._scan_and_report(event.src_path)
                self.triggered_time = datetime.now()
        except Exception as e:
            Log.e(f"Unexpected error: {e}")

    def on_moved(self, event: FileSystemEvent):
        try:
            if self._ignored(event):
                return
            if (datetime.now() - self.triggered_time) > self.cooldown:
                Log.v(f"FILE MOV | {event.src_path} > {event.dest_path}")
                # The file now lives at dest_path; scan its contents there.
                self._scan_and_report(event.dest_path)
                self.triggered_time = datetime.now()
        except Exception as e:
            Log.e(f"Unexpected error: {e}")

    def on_deleted(self, event: FileSystemEvent):
        try:
            if self._ignored(event):
                return
            if (datetime.now() - self.triggered_time) > self.cooldown:
                Log.v(f"FILE DEL | {event.src_path}")
                self.triggered_time = datetime.now()
        except Exception as e:
            Log.e(f"Unexpected error: {e}")

    def on_created(self, event: FileSystemEvent):
        try:
            if self._ignored(event):
                return
            if (datetime.now() - self.triggered_time) > self.cooldown:
                if event.is_directory:
                    return None
                Log.v(f"file created: {event.src_path}")
                self._scan_and_report(event.src_path)
                self.triggered_time = datetime.now()
        except Exception as e:
            Log.e(f"Unexpected error: {e}")

View File

@ -1,7 +1,7 @@
#region Imports #region Imports
import tomllib import tomllib
import requests import requests
from utils.logger import Log from utils.Logger import Log
#endregion #endregion
#region Variables #region Variables
@ -46,9 +46,13 @@ def generate_response(data):
# Conditional message structure for Groq # Conditional message structure for Groq
if use_groq: if use_groq:
payload["messages"] = [ payload["messages"] = [
{
"role": "system",
"content": f"{prompt}"
},
{ {
"role": "user", "role": "user",
"content": f"Using this data: {data}. Respond to this prompt: {prompt}" "content": f"```code\n{data}\n```"
} }
] ]
else: else:
@ -66,34 +70,20 @@ def generate_response(data):
return None return None
def ai_analyse(file_path): def ai_analyse(src):
"""Analyze a file and generate a response based on the user's input.""" """Analyze a file and generate a response based on the user's input."""
if enabled: if enabled:
try: try:
# Open and read file data
with open(file_path, "r", encoding="utf-8") as file:
file_data = file.read()
# Generate response using the file data # Generate response using the file data
response = generate_response(file_data) response = generate_response(src)
if response: if response:
#Log.s(f"Generated Response: {response}") #Log.s(f"Generated Response: {response}")
return response return response
else: else:
Log.e("AI did not respond.") Log.e("AI did not respond.")
except FileNotFoundError:
Log.e(f"File not found: {file_path}")
except Exception as e: except Exception as e:
Log.e(f"Unexpected error: {e}") Log.e(f"Unexpected error: {e}")
else: else:
return "AI integration is disabled in the configuration, enable AI integration for AI File Analyse." return "AI integration is disabled in the configuration, enable AI integration for AI File Analyse."
return None return None
# Example usage
if __name__ == "__main__":
file_path = "example.txt" # Replace with your input file path
result = ai_analyse(file_path)
if result:
print("[INFO] Analysis Result:")
print(result)

View File

@ -1,7 +1,5 @@
import tomllib import tomllib, requests
import time from utils.Logger import Log
import requests
from utils.logger import Log
def load_config(file_path): def load_config(file_path):
"""Load configuration from a TOML file.""" """Load configuration from a TOML file."""