This commit is contained in:
root 2025-03-24 20:07:14 +00:00
parent 39b8c90fec
commit 229d21c3e2
11 changed files with 236 additions and 172 deletions

2
.gitignore vendored
View File

@ -1,3 +1,3 @@
aaaastart.sh start.sh
config.toml config.toml
__pycache__ __pycache__

View File

@ -1,24 +1,32 @@
# RADAR # Novel, Anti-Abuse
RADAR is a watchdog software to watch file modifications, deletions, movements, creations and scan such files with YARA rules and later analyse them with AI (configurable) and send to your discord webhook (configurable)! Introducing Anti-Abuse by Novel.
# Install Anti-Abuse is an ✨ FREE, Open-Sourced radar based on yara rules built for pterodactyl nodes.
Firstly ensecure you have python3 and pip installed. ## Features
1. Watchdog based real-time monitoring.
2. Easily customizable by [Yara Rule](https://yara.readthedocs.io/en/stable/writingrules.html).
3. Various Integrations(discord webhook, etc).
4. Easy re-check action through AI-Based Analysis.
Secondly install packages we are using for RADAR: ## Installation
```python Requirements: python, keyboard, brain
1. Install requirements
```bash
pip install watchdog tomllib yara pip install watchdog tomllib yara
``` ```
2. Configure your config.toml and yara rules
Thirdly run configure config.toml, upload your YARA (.yar and .yara) signatures in /signatures and then finally run RADAR! Thirdly run configure config.toml, upload your YARA (.yar and .yara) signatures in /signatures and then finally run RADAR!
```python ```python
python3 main.py python3 main.py
``` ```
Done! You're running RADAR watchdog. Done! You're now running Anti-Abuse.
# Tips # Tips
@ -27,6 +35,11 @@ Tip 1: You don't know how to write YARA rules?
# Reporting security issue or vulnerability # Reporting security issue or vulnerability
Please contact us directly per email `lisahonkay@gmail.com` or using duscord `@_lisa_ns_` or `@inxtagram` to report security issue or vulnerability! Please contact us on email:
Made with <3 in python by inxtagram and _lisa_ns_, licensed under [GNU GENERAL PUBLIC LICENSE, Version 3](http://lhhomeserver.ddns.net:3000/Lisa_Stuff/RADAR/src/branch/main/LICENSE) |Maintainer|Contact|
|----|---|
|Lisa|lisahonkay@gmail.com, `@_lisa_ns_` on discord|
|Lin|contact@who.ad, `@inxtagram` on discord|
Made with ❤️ by inxtagram and `_lisa_ns_`, licensed under [GNU GENERAL PUBLIC LICENSE, Version 3](http://lhhomeserver.ddns.net:3000/Lisa_Stuff/RADAR/src/branch/main/LICENSE)

View File

@ -4,8 +4,9 @@ TODO:
2. Integration with pterodactyl (ref. py-dactyl or https://dashflo.net/docs/api/pterodactyl/v1/) 2. Integration with pterodactyl (ref. py-dactyl or https://dashflo.net/docs/api/pterodactyl/v1/)
3. Integration with pelican (ref? https://pelican.dev/) 3. Integration with pelican (ref? https://pelican.dev/)
4. Integration with docker 4. Integration with docker
5. Several AI Models support, if one fails to respond another model from the list will be used. Example: models = ["model1","model2","model3","etc"]
6. Multi threading support (for scans) 6. Multi threading support (for scans)
7. Multiple pathes support. Example: watchdog_path = ["./path/one","/root/test/","./etc"]
8. Includes ability to add ignore path in integrations or use path in integration , of course with multiple pathes support too
9. Ability to add ignore path or ignore file (multiple support too!) 9. Ability to add ignore path or ignore file (multiple support too!)
~~7. Multiple pathes support. Example: watchdog_path = ["./path/one","/root/test/","./etc"]~~
~~5. Several AI Models support, if one fails to respond another model from the list will be used. Example: models = ["model1","model2","model3","etc"]~~
~~8. Includes ability to add ignore path in integrations or use path in integration , of course with multiple pathes support too~~

1
aaaastart.sh Normal file
View File

@ -0,0 +1 @@
ss

View File

@ -1,4 +1,6 @@
ver = "250325d" ver = "250325d"
machineID = "node1"
#*************************************************# #*************************************************#
# # # #
@ -8,7 +10,7 @@ ver = "250325d"
[LANGUGAE.english] [LANGUGAE.english]
radarStarted = "RADAR Started with in - {}s." novelStarted = "Novel(Anti Abuse) Started within - {}s."
#**************************************************# #**************************************************#
@ -33,7 +35,7 @@ watchdogPath = "./"
SignaturePath = "./signatures" SignaturePath = "./signatures"
watchdogIgnorePath = ["./signatures"] watchdogIgnorePath = ["./signatures"]
watchdogIgnoreFile = ["main.py"] watchdogIgnoreFile = ["./main.py", "./config.toml", "es/common.yara"]
#**************************************************# #**************************************************#
# # # #
@ -44,16 +46,16 @@ watchdogIgnoreFile = ["main.py"]
[INTEGRATION.AI] [INTEGRATION.AI]
enabled = true enabled = true
generate_model = "llama-3.1-8b-instant" # for home usage gemma3:1b recommended, for Groq llama-3.1-8b-instant generate_models = ["llama-3.2-90b-vision-preview","llama-3.3-70b-versatile","llama-3.3-70b-specdec","llama-3.2-11b-vision-preview","llama3-70b-8192","llama-3.1-8b-instant","llama3-8b-8192","llama-3.2-3b-preview","llama-3.2-1b-preview"] # for home usage gemma3:1b recommended, for Groq llama-3.1-8b-instant
generate_endpoint = "http://IP:PORT/api/generate" # Can be empty if using groq generate_endpoint = "http://IP:PORT/api/generate" # Can be empty if using groq
use_groq = true use_groq = true
groq_api_token = "" # Get one at https://console.groq.com/keys groq_api_token = "REDACTED_LEAKED_KEY_REVOKE_IMMEDIATELY" # SECURITY: a live API key was committed here in plaintext — revoke it at https://console.groq.com/keys and load secrets from the environment instead
prompt = "Analyze the given code and return an abuse score (0-10) with a brief reason. Example abuses: Crypto Miner, Shell Access, Nezha Proxy (VPN/Proxy usage), Disk Filling, Tor, DDoS, Abusive Resource Usage. Response format: '**5/10** <your reason>'. No extra messages." prompt = "Analyze the given code and return an abuse score (0-10) with a brief reason. Example abuses: Crypto Mining, Shell Access, Nezha Proxy (VPN/Proxy usage), Disk Filling, Tor, DDoS, Abusive Resource Usage. Response format: '**5/10** <your reason>'. No extra messages."
[INTEGRATION.DISCORD] [INTEGRATION.DISCORD]
enabled = true enabled = true
webhook_url = "" webhook_url = "REDACTED_LEAKED_WEBHOOK_DELETE_AND_ROTATE" # SECURITY: a live Discord webhook URL was committed here in plaintext — delete/rotate it in the Discord channel settings and load it from the environment instead
truncate_text = true # Used only if AI INTEGRATION is enabled, truncates text if true to maximum allowed characters or when false splits into a few webhook messages. truncate_text = true # Used only if AI INTEGRATION is enabled, truncates text if true to maximum allowed characters or when false splits into a few webhook messages.

22
main.py
View File

@ -10,15 +10,25 @@ t = time.time()
with open("config.toml", "rb") as f: with open("config.toml", "rb") as f:
data = tomllib.load(f) data = tomllib.load(f)
Log.v(str(data))
path = data['DETECTION']['watchdogPath'] path = data['DETECTION']['watchdogPath']
Log.v(""" Log.v("""
____ ____
/ __ \\____ _/ __ \\____ ______ o o 8
/ /_/ / __ `/ / / / __ `/ ___/ 8b 8 8
/ _, _/ /_/ / /_/ / /_/ / / 8`b 8 .oPYo. o o .oPYo. 8
/_/ |_|\\__,_/_____/\\__,_/_/ (ver. {}) 8 `b 8 8 8 Y. .P 8oooo8 8
8 `b8 8 8 `b..d' 8. 8
8 `8 `YooP' `YP' `Yooo' 8
..:::..:.....:::...:::.....:..
::::::::::::::::::::::::::::::
Product - ANTI-ABUSE
Release - {}
License - GNU GENERAL PUBLIC LICENSE, Version 3
""".format(data['ver'])) """.format(data['ver']))
#endregion #endregion
@ -26,7 +36,7 @@ if __name__ == "__main__":
with DirWatcher(path, interval=1) as watcher: with DirWatcher(path, interval=1) as watcher:
watcher.run() watcher.run()
Log.s(data['LANGUGAE']['english']['radarStarted'].format(str(round(time.time() - t, 1)))) Log.s(data['LANGUGAE']['english']['novelStarted'].format(str(round(time.time() - t, 1))))
try: try:
while True: while True:
time.sleep(1) time.sleep(1)

View File

@ -1,51 +1,56 @@
rule CHINESE_NEZHA_ARGO {
rule CHIENESE_NEZHA_ARGO
{
strings: strings:
$a1 = "TkVaSEE=" $a1 = "TkVaSEE=" // Base64 for "NEZHA"
$a2 = "tunnel.json" $a2 = "tunnel.json"
$a3 = "vless" $a3 = "vless"
$a4 = "dmxlc3M=" $a4 = "dmxlc3M=" // Base64 for "vless"
$a5 = "/vmess" $a5 = "/vmess"
$a6 = "L3ZtZXNz" $a6 = "L3ZtZXNz" // Base64 for "/vmess"
$a7 = "V0FSUA==" $a7 = "V0FSUA==" // Base64 for "WARP"
$a8 = "/eooce/" $a8 = "/eooce/"
$a9 = "ARGO_AUTH" $a9 = "ARGO_AUTH"
$a10 = "--edge-ip-version" $a10 = "--edge-ip-version"
$a11 = "LS1lZGdlLWlwLXZlcnNpb24=" $a11 = "LS1lZGdlLWlwLXZlcnNpb24=" // Base64 for "--edge-ip-version"
$12 = "sub.txt" $a12 = "sub.txt"
$13 = "Server\x20is\x20running\x20on\x20port\x20" $a13 = "Server\x20is\x20running\x20on\x20port\x20"
$14 = "nysteria2" $a14 = "nysteria2"
$15 = "openssl req" $a15 = "openssl req"
condition: condition:
2 of ($a1, $a2, $a3, $a4, $a5, $a6, $a7, $a8, $a9, $a10, $a11, $12, $13, $14, $15) 2 of ($a*)
} }
rule OBFSCATED_CODE rule OBFUSCATED_CODE {
{
meta: meta:
description = "Detects an obfuscated script" description = "Detects an obfuscated script"
strings: strings:
$f1 = "_0x" nocase $f1 = "0x" nocase
$f2 = "\x20" nocase $f2 = "x20" nocase
$f3 = "\x0a" nocase $f3 = "x0a" nocase
$f5 = "openssl req -new -x509" nocase
$f6 = "cert.pem" nocase
$f7 = "private.key" nocase
condition: condition:
2 of ($f*) 2 of ($f1, $f2, $f3)
} }
rule OVERLOAD_CRYPTO_MINER {
rule OVERLOAD_CRYPTO_MINER
{
meta: meta:
ref = "https://gist.github.com/GelosSnake/c2d4d6ef6f93ccb7d3afb5b1e26c7b4e" ref = "https://gist.github.com/GelosSnake/c2d4d6ef6f93ccb7d3afb5b1e26c7b4e"
strings: strings:
$a1 = "stratum+tcp" $a1 = "stratum+tcp"
$a2 = "xmrig"
$a3 = "crypto"
condition: condition:
$a1 any of them
} }
rule REVERSE_SHELL {
strings:
$a1 = "0>&1"
$a2 = "sh"
$a3 = "-i"
$a4 = "0<&196"
$a5 = "<>/dev/tcp"
$a6 = "socket.socket"
condition:
2 of them
}

View File

@ -1,7 +1,6 @@
#region Imports #region Imports
import os import os, yara, tomllib
import yara from utils.Logger import Log
import tomllib
#endregion #endregion
#region Variables #region Variables
@ -29,17 +28,17 @@ def scan(src):
error_messages = {} error_messages = {}
for filename in os.listdir(data['DETECTION']['SignaturePath']): for filename in os.listdir(data['DETECTION']['SignaturePath']):
if filename.endswith((".yara")): if filename.endswith(".yara") or filename.endswith(".yar"): # both are yara extensions ok
rule_path = os.path.join(data['DETECTION']['SignaturePath'], filename) rule_path = os.path.join(data['DETECTION']['SignaturePath'], filename)
try: try:
rules = yara.compile(filepath=rule_path) rules = yara.compile(filepath=rule_path)
file_matches = rules.match(data=src) file_matches = rules.match(data=src)
if file_matches: if file_matches:
matches[filename] = file_matches matches[filename] = file_matches
# for match in file_matches: #for match in file_matches:
# print(f" - Rule: {match.rule}") # Log.v(f" - Rule: {match.rule}")
except yara.Error as e: except yara.Error as e:
Log.e(e)
error_messages[filename] = e error_messages[filename] = e
return matches, error_messages return matches, error_messages
#endregion #endregion

View File

@ -1,14 +1,18 @@
"""Context manager for basic directory watching. """
CREDIT
Includes a workaround for <https://github.com/gorakhargosh/watchdog/issues/346>. Context manager for basic directory watching.
- <https://github.com/gorakhargosh/watchdog/issues/346>.
""" """
from datetime import datetime, timedelta from datetime import datetime, timedelta
from pathlib import Path from pathlib import Path
from time import sleep from time import sleep
import threading
import time
from typing import Callable, Self from typing import Callable, Self
from utils.Logger import Log from utils.Logger import Log
import tomllib, time import tomllib
from watchdog.events import FileSystemEvent, FileSystemEventHandler from watchdog.events import FileSystemEvent, FileSystemEventHandler
from watchdog.observers import Observer from watchdog.observers import Observer
@ -29,17 +33,35 @@ if not isinstance(paths, list):
ignore_paths = data['DETECTION'].get('watchdogIgnorePath', []) ignore_paths = data['DETECTION'].get('watchdogIgnorePath', [])
ignore_files = data['DETECTION'].get('watchdogIgnoreFile', []) ignore_files = data['DETECTION'].get('watchdogIgnoreFile', [])
def s(input_dict): def s(input_dict):
return [ return [
{"name": key, "value": '\n'.join(' - ' + str(item) for item in items)} {"name": key, "value": '\n'.join(' - ' + str(item) for item in items)}
for key, items in input_dict.items() for key, items in input_dict.items()
] ]
def c(d): def c(d):
c=0 count = 0
for key in d: for key in d:
if isinstance(d[key], list): if isinstance(d[key], list):
c += len(d[key]) count += len(d[key])
return c return count
def analysis(event_path: str, file_content: str, flag_type: str):
"""
Process file events in a separate thread.
This function scans the file content, and if flagged,
performs AI analysis and sends a webhook notification.
"""
results = scan(file_content)
if results[0]:
Log.s(f"Flagged {event_path}")
analysis = ai_analyse(file_content)
msg = f"Total Flagged Pattern: {str(c(results[0]))}\n\n{analysis}"
webhook(event_path, s(results[0]), msg)
class DirWatcher: class DirWatcher:
"""Run a function when a directory changes.""" """Run a function when a directory changes."""
@ -49,18 +71,18 @@ class DirWatcher:
def __init__( def __init__(
self, self,
watch_dir: Path, watch_dir: Path,
interval: int = 0.2, interval: float = 0.2,
cooldown: int = 0.1, cooldown: float = 0.1,
): ):
if interval < self.min_cooldown: if interval < self.min_cooldown:
raise ValueError( raise ValueError(
f"Interval of {interval} seconds is less than the minimum cooldown of" f"Interval of {interval} seconds is less than the minimum cooldown of "
f" {self.min_cooldown} seconds." f"{self.min_cooldown} seconds."
) )
if cooldown < self.min_cooldown: if cooldown < self.min_cooldown:
raise ValueError( raise ValueError(
f"Cooldown of {cooldown} seconds is less than the minimum cooldown of" f"Cooldown of {cooldown} seconds is less than the minimum cooldown of "
f" {self.min_cooldown} seconds." f"{self.min_cooldown} seconds."
) )
self.watch_dir = watch_dir self.watch_dir = watch_dir
self.interval = interval self.interval = interval
@ -72,7 +94,7 @@ class DirWatcher:
ModifiedFileHandler(scan, self.cooldown), self.watch_dir, recursive=True ModifiedFileHandler(scan, self.cooldown), self.watch_dir, recursive=True
) )
Log.s(data['LANGUGAE']['english']['radarStarted'].format(str(round(time.time() - t, 5)))) Log.s(data['LANGUGAE']['english']['novelStarted'].format(str(round(time.time() - t, 5))))
self.observer.start() self.observer.start()
return self return self
@ -99,71 +121,78 @@ class DirWatcher:
class ModifiedFileHandler(FileSystemEventHandler): class ModifiedFileHandler(FileSystemEventHandler):
"""Handle modified files.""" """Handle modified files using threading for processing."""
def __init__(self, func: Callable[[FileSystemEvent], None], cooldown: int): def __init__(self, func: Callable[[FileSystemEvent], None], cooldown: float):
self.cooldown = timedelta(seconds=cooldown) self.cooldown = timedelta(seconds=cooldown)
self.triggered_time = datetime.min self.triggered_time = datetime.min
def on_any_event(self, event): def ignore_event(self, event: FileSystemEvent) -> bool:
for ignore_path in ignore_paths: for ignore_path in ignore_paths:
if event.src_path.startswith(ignore_path): if event.src_path.startswith(ignore_path):
return True return True
for ignore_file in ignore_files: for ignore_file in ignore_files:
if event.src_path.endswith(ignore_file): if event.src_path.endswith(ignore_file):
return True return True
if(event.src_path == "."): if event.src_path == ".":
return True
return False
def on_any_event(self, event: FileSystemEvent):
if self.ignore_event(event):
return True return True
def on_modified(self, event: FileSystemEvent): def on_modified(self, event: FileSystemEvent):
try: if self.ignore_event(event):
if (datetime.now() - self.triggered_time) > self.cooldown:
src = open(event.src_path, "r").read()
if(event.src_path == "."):
return return
if (datetime.now() - self.triggered_time) > self.cooldown:
try:
with open(event.src_path, "r") as f:
src = f.read()
Log.v(f"FILE MODF | {event.src_path}") Log.v(f"FILE MODF | {event.src_path}")
r = scan(src) # Process in a separate thread
if r[0]: threading.Thread(target=analysis, args=(event.src_path, src, "modification")).start()
Log.s(f"Flagged {event.src_path}")
analyse = ai_analyse(src)
webhook(event.src_path, s(r[0]), f"Total Flagged Pattern: {str(c(r[0]))}\n\n{analyse}")
self.triggered_time = datetime.now() self.triggered_time = datetime.now()
except: pass except Exception:
pass
def on_moved(self, event: FileSystemEvent): def on_moved(self, event: FileSystemEvent):
if self.ignore_event(event):
try: return
if (datetime.now() - self.triggered_time) > self.cooldown: if (datetime.now() - self.triggered_time) > self.cooldown:
try:
Log.v(f"FILE MOV | {event.src_path} > {event.dest_path}") Log.v(f"FILE MOV | {event.src_path} > {event.dest_path}")
r = scan(event.src_path) # For moved events, you might choose to scan the original or destination file.
if r[0]: # Here, we'll scan the source path.
Log.s(f"Flagged {event.src_path}") with open(event.src_path, "r") as f:
analyse = ai_analyse(event.src_path) src = f.read()
webhook(event.src_path, s(r[0]), f"Total Flagged Pattern: {str(c(r[0]))}\n\n{analyse}") threading.Thread(target=analysis, args=(event.src_path, src, "moved")).start()
self.triggered_time = datetime.now() self.triggered_time = datetime.now()
except: pass except Exception:
pass
def on_deleted(self, event: FileSystemEvent): def on_deleted(self, event: FileSystemEvent):
try: if self.ignore_event(event):
return
if (datetime.now() - self.triggered_time) > self.cooldown: if (datetime.now() - self.triggered_time) > self.cooldown:
try:
Log.v(f"FILE DEL | {event.src_path}") Log.v(f"FILE DEL | {event.src_path}")
self.triggered_time = datetime.now() self.triggered_time = datetime.now()
except: pass except Exception:
pass
def on_created(self, event: FileSystemEvent): def on_created(self, event: FileSystemEvent):
try: if self.ignore_event(event):
print(1) return
if (datetime.now() - self.triggered_time) > self.cooldown: if (datetime.now() - self.triggered_time) > self.cooldown:
try:
if event.is_directory: if event.is_directory:
return None return
else: else:
Log.v(f"file created: {event.src_path}") Log.v(f"file created: {event.src_path}")
r = scan(event.src_path) with open(event.src_path, "r") as f:
if r[0]: content = f.read()
Log.s(f"Flagged {event.src_path}") threading.Thread(target=analysis, args=(event.src_path, content, "creation")).start()
analyse = ai_analyse(event.src_path)
webhook(event.src_path, s(r[0]), f"Total Flagged Pattern: {str(c(r[0]))}\n\n{analyse}")
self.triggered_time = datetime.now() self.triggered_time = datetime.now()
except: pass except Exception:
pass

View File

@ -11,7 +11,7 @@ with open("./config.toml", "rb") as f:
enabled = data["INTEGRATION"]["AI"]["enabled"] enabled = data["INTEGRATION"]["AI"]["enabled"]
generate_endpoint = data["INTEGRATION"]["AI"]["generate_endpoint"] generate_endpoint = data["INTEGRATION"]["AI"]["generate_endpoint"]
generate_model = data["INTEGRATION"]["AI"]["generate_model"] model_list = data["INTEGRATION"]["AI"]["generate_models"]
use_groq = data["INTEGRATION"]["AI"]["use_groq"] use_groq = data["INTEGRATION"]["AI"]["use_groq"]
groq_api = data["INTEGRATION"]["AI"]["groq_api_token"] groq_api = data["INTEGRATION"]["AI"]["groq_api_token"]
prompt = data["INTEGRATION"]["AI"]["prompt"] prompt = data["INTEGRATION"]["AI"]["prompt"]
@ -22,9 +22,10 @@ if use_groq:
#endregion #endregion
def generate_response(data): def generate_response(data):
"""Generate a response using the Groq API.""" """Generate a response using the Groq or OLLAMA API."""
error_messages = []
for generate_model in model_list:
try: try:
# Create headers
headers = { headers = {
"Content-Type": "application/json", "Content-Type": "application/json",
} }
@ -56,7 +57,7 @@ def generate_response(data):
} }
] ]
else: else:
payload["prompt"] = f"Using this data: {data}. Respond to this prompt: {prompt}" payload["prompt"] = f"Using this data: {data}. Respond to this prompt: {prompt}\n"
response = requests.post(generate_endpoint, json=payload, headers=headers) response = requests.post(generate_endpoint, json=payload, headers=headers)
response.raise_for_status() response.raise_for_status()
@ -67,7 +68,10 @@ def generate_response(data):
except requests.exceptions.RequestException as e: except requests.exceptions.RequestException as e:
Log.e(f"Failed to generate response: {e}") Log.e(f"Failed to generate response: {e}")
Log.e(f"Using model: {generate_model}")
error_messages.append(f"Model {generate_model} failed: {e}")
return None return None
return f"All models failed. Errors: {error_messages}"
def ai_analyse(src): def ai_analyse(src):
@ -81,9 +85,9 @@ def ai_analyse(src):
#Log.s(f"Generated Response: {response}") #Log.s(f"Generated Response: {response}")
return response return response
else: else:
Log.e("AI did not respond.") return "No AI Description provided for this action; check config.toml maybe?"
except Exception as e: except Exception as e:
Log.e(f"Unexpected error: {e}") Log.e(f"Unexpected error: {e}")
else: else:
return "AI integration is disabled in the configuration, enable AI integration for AI File Analyse." return "No AI Description provided for this action; check config.toml maybe?"
return None return None

View File

@ -59,7 +59,7 @@ def webhook(file_path, yara_matches, ai=""):
if truncate_text_flag: if truncate_text_flag:
# Single embed if truncated # Single embed if truncated
embeds.append({ embeds.append({
"title": "⚠️ WATCHDOG ALERT ⚠️", "title": f"⚠️ WATCHDOG ALERT ⚠️ - {config_data['machineID']}",
"description": description, "description": description,
"color": 65280, "color": 65280,
"fields": yara_matches, "fields": yara_matches,