Skip to content

Commit be3061f

Browse files
committed
File system cleanup
1 parent 97de871 commit be3061f

9 files changed

Lines changed: 70 additions & 67 deletions

File tree

Sources/Agentic_System/.gitignore

Lines changed: 0 additions & 1 deletion
This file was deleted.

Sources/Agentic_System/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
3. Put your OpenAI key into a `keys.cfg` in Sources/Agentic_System
99
4. Replace the smolagents site-packages copy located in `.venv/lib/python3.12/site-packages` or similar with the provided fork of smolagents<br>
1010
You can simply remove the existing smolagents in site-packages and move + rename the fork as `smolagents`
11-
5. run `python3 rises-the-fog.py (--debug)`
11+
5. run `python3 start_scripts/rises-the-fog.py (--debug)`
1212

1313
### Technical flow
1414
#### The first multi-agent system is implemented and starts by initializing a root manager whose goal is to actually orchestrate the creation of program templates. It starts by selecting a "code region" that it determines to be interesting; this is done by querying a RAG DB (json file) that contains over 8000 regression tests, their FuzzIL form, and execution data via trace flags. We instruct the system to select a code region by using the execution data. On top of that, the system has access to a vector RAG DB with: V8 docs, JS MDN docs, C++ docs, and various research papers that it can query to gather more information. The vectorization library we use is Meta's FAISS — "Facebook AI Similarity Search". After this is done it will select a code region such as: "Keyed array element access & elements-kind transitions (KeyedStoreIC/KeyedLoadIC, ElementsTransition, GrowElements/CopyElements, and Array builtin fast paths)".

Sources/Agentic_System/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from pathlib import Path
44

55
_module_dir = Path(__file__).parent
6-
_module_path = _module_dir / "rises-the-fog.py"
6+
_module_path = _module_dir / "start_scripts" / "rises-the-fog.py"
77

88
if str(_module_dir) not in sys.path:
99
sys.path.insert(0, str(_module_dir))
File renamed without changes.
File renamed without changes.

Sources/Agentic_System/ethiopian_boiled_egg.py renamed to Sources/Agentic_System/start_scripts/ethiopian_boiled_egg.py

Lines changed: 34 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -6,14 +6,23 @@
66
import sys
77
import subprocess
88
from pathlib import Path
9-
import logging
9+
import logging
1010
from datetime import datetime
1111
import pytz
12-
import config_loader as config_loader
12+
13+
import site
14+
15+
# Ensure the Agentic_System package root is on sys.path so sibling modules import correctly
16+
_agentic_root = Path(__file__).resolve().parents[1]
17+
if str(_agentic_root) not in sys.path:
18+
sys.path.insert(0, str(_agentic_root))
19+
20+
import config_loader as config_loader
1321
from agents.EBG_crash import EBG_Crash
1422
from agents.EBG_plateau import EBG_Plateau
1523
from smolagents import LiteLLMModel
1624
from config_loader import get_openai_api_key, get_anthropic_api_key, get_deepseek_api_key
25+
1726
logger = logging.getLogger("boiled_eggs")
1827
if not logger.handlers:
1928
logger.addHandler(logging.NullHandler())
@@ -23,12 +32,10 @@
2332

2433
BASE_MODEL_ID = "deepseek"
2534

26-
import site
27-
from pathlib import Path
28-
2935
# Prefer the project's virtualenv site-packages if present, so tools like chromadb are importable
3036
try:
31-
_root = Path(__file__).resolve().parents[2]
37+
# Repo root (unchanged despite this file moving one level deeper)
38+
_root = Path(__file__).resolve().parents[3]
3239
_venv_site = _root / ".venv" / "lib" / f"python{sys.version_info.major}.{sys.version_info.minor}" / "site-packages"
3340
if _venv_site.exists():
3441
site.addsitedir(str(_venv_site))
@@ -42,30 +49,33 @@ def __init__(self, mode: str = "Crash", fuzzer_id: str = "fuzzer-1", crash_progr
4249
self.openai_api_key = get_openai_api_key()
4350
self.anthropic_api_key = get_anthropic_api_key()
4451
self.deepseek_api_key = get_deepseek_api_key()
45-
52+
4653
if self.deepseek_api_key:
4754
os.environ["DEEPSEEK_API_KEY"] = self.deepseek_api_key
48-
55+
4956
self.model = LiteLLMModel(model_id=BASE_MODEL_ID, api_key=self.openai_api_key)
5057
print("System is running in " + mode + " mode")
5158
if mode == "Crash":
5259
self.system = EBG_Crash(self.model, api_key=self.openai_api_key, anthropic_api_key=self.anthropic_api_key, crash_program_hash=crash_program_hash)
5360
elif mode == "Plateau":
5461
self.system = EBG_Plateau(self.model, api_key=self.openai_api_key, anthropic_api_key=self.anthropic_api_key, fuzzer_id=fuzzer_id)
5562

56-
def run(force_logging: bool = True):
5763

58-
site.addsitedir(Path(__file__).parent.parent)
59-
#smolagent-fork
64+
def run(force_logging: bool = True):
65+
# Add the previous parent directory (Sources) to site dirs, preserving old behavior
66+
site.addsitedir(Path(__file__).resolve().parents[2])
67+
# smolagent-fork
6068

6169
parser = argparse.ArgumentParser(description="Ethiopian Boiled Eggs agentic system")
6270
parser.add_argument("--debug", action="store_true", help="Enable debug logging to fog logs")
6371
args = parser.parse_args()
64-
#force logging
72+
# force logging
6573
args.debug = force_logging
6674

6775
if args.debug:
68-
log_dir = Path(__file__).parent / 'agents' / 'ebg_logs'
76+
# Logs live under Agentic_System/agents/ebg_logs even though this file moved into start_scripts
77+
agentic_root = Path(__file__).resolve().parents[1]
78+
log_dir = agentic_root / 'agents' / 'ebg_logs'
6979
log_dir.mkdir(parents=True, exist_ok=True)
7080
latest_num = 0
7181
if os.path.exists(log_dir / 'ethiopian_boiled_egg.log'):
@@ -77,13 +87,12 @@ def run(force_logging: bool = True):
7787
if num > latest_num:
7888
latest_num = num
7989
log_path = str(log_dir / f'ethiopian_boiled_egg{latest_num + 1}.log')
80-
else:
90+
else:
8191
log_path = str(log_dir / f'ethiopian_boiled_egg.log')
8292

8393
if os.path.exists(log_path):
8494
print(f"Log file already exists: {log_path}")
8595

86-
8796
# Configure logger to write messages as-is (no prefixes) for 1:1 capture
8897
logger.handlers.clear()
8998
file_handler = logging.FileHandler(log_path, mode='a', encoding='utf-8')
@@ -92,22 +101,24 @@ def run(force_logging: bool = True):
92101
logger.setLevel(logging.INFO)
93102
logger.disabled = False
94103

95-
96104
class _StreamToLogger:
97105
def __init__(self, log_fn):
98106
self.log_fn = log_fn
99107
self._buffer = ''
108+
100109
def write(self, message):
101110
if not isinstance(message, str):
102111
message = message.decode('utf-8', errors='ignore')
103112
self._buffer += message
104113
while '\n' in self._buffer:
105114
line, self._buffer = self._buffer.split('\n', 1)
106115
self.log_fn(line)
116+
107117
def flush(self):
108118
if self._buffer:
109119
self.log_fn(self._buffer)
110120
self._buffer = ''
121+
111122
def isatty(self):
112123
return False
113124

@@ -121,16 +132,19 @@ def isatty(self):
121132
logger.info(f"time: {datetime.now(est_timezone)}")
122133
a = EthiopianBoiledEgg(mode="Plateau")
123134
path = os.path.join(os.getenv('FUZZILLI_PATH', ''), "Sources", "Agentic_System")
124-
if (not os.path.exists(os.path.join(path, "regressions.json"))):
135+
regressions_dir = os.path.join(path, "regressions")
136+
if not os.path.exists(os.path.join(regressions_dir, "regressions.json")):
125137
try:
126-
subprocess.run(["unzstd", os.path.join(path, "regressions.json.zst")], check=True)
127-
#unzstd regressions.json.zst
138+
subprocess.run(["unzstd", os.path.join(regressions_dir, "regressions.json.zst")], check=True)
139+
# unzstd regressions.json.zst
128140
except subprocess.CalledProcessError as e:
129141
logger.error(f"Error decompressing regressions.json.zst: {e}")
130142
exit(1)
131143
else:
132144
logger.info("Regressions.json decompressed successfully")
133145
a.system.start_system()
134146

147+
135148
if __name__ == "__main__":
136-
sys.exit(run())
149+
sys.exit(run())
150+

Sources/Agentic_System/rises-the-fog.py renamed to Sources/Agentic_System/start_scripts/rises-the-fog.py

Lines changed: 33 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -6,14 +6,23 @@
66
import sys
77
import subprocess
88
from pathlib import Path
9-
import logging
9+
import logging
1010
from datetime import datetime
1111
import pytz
12-
import config_loader as config_loader
12+
13+
import site
14+
15+
# Ensure the Agentic_System package root is on sys.path so sibling modules import correctly
16+
_agentic_root = Path(__file__).resolve().parents[1]
17+
if str(_agentic_root) not in sys.path:
18+
sys.path.insert(0, str(_agentic_root))
19+
20+
import config_loader as config_loader
1321
from agents.FoG import Father
14-
#from agents.EBG import EBG
22+
# from agents.EBG import EBG
1523
from smolagents import LiteLLMModel
1624
from config_loader import get_openai_api_key, get_anthropic_api_key, get_deepseek_api_key
25+
1726
logger = logging.getLogger("rises_the_fog")
1827
if not logger.handlers:
1928
logger.addHandler(logging.NullHandler())
@@ -23,12 +32,10 @@
2332

2433
BASE_MODEL_ID = "gpt-5-mini"
2534

26-
import site
27-
from pathlib import Path
28-
2935
# Prefer the project's virtualenv site-packages if present, so tools like chromadb are importable
3036
try:
31-
_root = Path(__file__).resolve().parents[2]
37+
# Repo root (unchanged despite this file moving one level deeper)
38+
_root = Path(__file__).resolve().parents[3]
3239
_venv_site = _root / ".venv" / "lib" / f"python{sys.version_info.major}.{sys.version_info.minor}" / "site-packages"
3340
if _venv_site.exists():
3441
site.addsitedir(str(_venv_site))
@@ -42,28 +49,30 @@ def __init__(self):
4249
self.openai_api_key = get_openai_api_key()
4350
self.anthropic_api_key = get_anthropic_api_key()
4451
self.deepseek_api_key = get_deepseek_api_key()
45-
52+
4653
if self.deepseek_api_key:
4754
os.environ["DEEPSEEK_API_KEY"] = self.deepseek_api_key
48-
55+
4956
self.model = LiteLLMModel(model_id=BASE_MODEL_ID, api_key=self.openai_api_key)
5057
self.system = Father(self.model, api_key=self.deepseek_api_key, anthropic_api_key=self.anthropic_api_key)
5158
# self.ebg = EBG(self.model, api_key=self.openai_api_key, anthropic_api_key=self.anthropic_api_key)
52-
5359

54-
def run(force_logging: bool = True):
5560

56-
site.addsitedir(Path(__file__).parent.parent)
57-
#smolagent-fork
61+
def run(force_logging: bool = True):
62+
# Add the previous parent directory (Sources) to site dirs, preserving old behavior
63+
site.addsitedir(Path(__file__).resolve().parents[2])
64+
# smolagent-fork
5865

5966
parser = argparse.ArgumentParser(description="Rise the FoG agentic system")
6067
parser.add_argument("--debug", action="store_true", help="Enable debug logging to fog logs")
6168
args = parser.parse_args()
62-
#force logging
69+
# force logging
6370
args.debug = force_logging
6471

6572
if args.debug:
66-
log_dir = Path(__file__).parent / 'agents' / 'fog_logs'
73+
# Logs live under Agentic_System/agents/fog_logs even though this file moved into start_scripts
74+
agentic_root = Path(__file__).resolve().parents[1]
75+
log_dir = agentic_root / 'agents' / 'fog_logs'
6776
log_dir.mkdir(parents=True, exist_ok=True)
6877
latest_num = 0
6978
if os.path.exists(log_dir / 'rises_the_fog.log'):
@@ -75,13 +84,12 @@ def run(force_logging: bool = True):
7584
if num > latest_num:
7685
latest_num = num
7786
log_path = str(log_dir / f'rises_the_fog{latest_num + 1}.log')
78-
else:
87+
else:
7988
log_path = str(log_dir / f'rises_the_fog.log')
8089

8190
if os.path.exists(log_path):
8291
print(f"Log file already exists: {log_path}")
8392

84-
8593
# Configure logger to write messages as-is (no prefixes) for 1:1 capture
8694
logger.handlers.clear()
8795
file_handler = logging.FileHandler(log_path, mode='a', encoding='utf-8')
@@ -90,22 +98,24 @@ def run(force_logging: bool = True):
9098
logger.setLevel(logging.INFO)
9199
logger.disabled = False
92100

93-
94101
class _StreamToLogger:
95102
def __init__(self, log_fn):
96103
self.log_fn = log_fn
97104
self._buffer = ''
105+
98106
def write(self, message):
99107
if not isinstance(message, str):
100108
message = message.decode('utf-8', errors='ignore')
101109
self._buffer += message
102110
while '\n' in self._buffer:
103111
line, self._buffer = self._buffer.split('\n', 1)
104112
self.log_fn(line)
113+
105114
def flush(self):
106115
if self._buffer:
107116
self.log_fn(self._buffer)
108117
self._buffer = ''
118+
109119
def isatty(self):
110120
return False
111121

@@ -119,10 +129,11 @@ def isatty(self):
119129
logger.info(f"time: {datetime.now(est_timezone)}")
120130
a = FatherOfGod()
121131
path = os.path.join(os.getenv('FUZZILLI_PATH', ''), "Sources", "Agentic_System")
122-
if (not os.path.exists(os.path.join(path, "regressions.json"))):
132+
regressions_dir = os.path.join(path, "regressions")
133+
if not os.path.exists(os.path.join(regressions_dir, "regressions.json")):
123134
try:
124-
subprocess.run(["unzstd", os.path.join(path, "regressions.json.zst")], check=True)
125-
#unzstd regressions.json.zst
135+
subprocess.run(["unzstd", os.path.join(regressions_dir, "regressions.json.zst")], check=True)
136+
# unzstd regressions.json.zst
126137
except subprocess.CalledProcessError as e:
127138
logger.error(f"Error decompressing regressions.json.zst: {e}")
128139
exit(1)
@@ -133,3 +144,4 @@ def isatty(self):
133144

134145
if __name__ == "__main__":
135146
sys.exit(run())
147+

Sources/Agentic_System/test-ebg.py

Lines changed: 0 additions & 22 deletions
This file was deleted.

Sources/Agentic_System/tools/FoG_tools.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
pass
2424

2525
# Cached regressions.json data to avoid reloading on every tool call
26-
_REGRESSIONS_PATH = (Path(__file__).parent.parent / "regressions.json").resolve()
26+
_REGRESSIONS_PATH = (Path(__file__).parent.parent / "regressions" / "regressions.json").resolve()
2727
_REGRESSIONS_CACHE = None
2828
_TEMPLATES_PATH = (Path(__file__).parent.parent / "templates" / "templates.json").resolve()
2929
_TEMPLATES_CACHE = None

0 commit comments

Comments
 (0)